# HG changeset patch
# User Artem Tikhomirov
# Date 1295835285 -3600
# Node ID 6f1b88693d48422e98c3eaaa8428ffd4d4d98ca7
# Parent  0d279bcc44427cb5ae2f3407c02f21187ccc8aea
Complete refactoring to org.tmatesoft

diff -r 0d279bcc4442 -r 6f1b88693d48 cmdline/org/tmatesoft/hg/console/Bundle.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cmdline/org/tmatesoft/hg/console/Bundle.java	Mon Jan 24 03:14:45 2011 +0100
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2011 TMate Software Ltd
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@svnkit.com
+ */
+package org.tmatesoft.hg.console;
+
+import java.io.File;
+
+import org.tmatesoft.hg.internal.DataAccessProvider;
+import org.tmatesoft.hg.repo.HgBundle;
+import org.tmatesoft.hg.repo.HgRepository;
+
+
+/**
+ * WORK IN PROGRESS, DO NOT USE
+ *
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class Bundle {
+
+	public static void main(String[] args) throws Exception {
+		Options cmdLineOpts = Options.parse(args);
+		HgRepository hgRepo = cmdLineOpts.findRepository();
+		if (hgRepo.isInvalid()) {
+			System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation());
+			return;
+		}
+		File bundleFile = new File("/temp/hg/hg-bundle-a78c980749e3.tmp");
+		DataAccessProvider dap = new DataAccessProvider();
+		HgBundle hgBundle = new HgBundle(dap, bundleFile);
+//		hgBundle.dump();
+		hgBundle.changes(hgRepo);
+	}
+}
diff -r 0d279bcc4442 -r 6f1b88693d48 cmdline/org/tmatesoft/hg/console/Cat.java
--- a/cmdline/org/tmatesoft/hg/console/Cat.java	Sun Jan 23 04:06:18 2011 +0100
+++ b/cmdline/org/tmatesoft/hg/console/Cat.java	Mon Jan 24 03:14:45 2011 +0100
@@ -16,11 +16,11 @@
  */
 package org.tmatesoft.hg.console;
 
-import com.tmate.hgkit.fs.RepositoryLookup;
-import com.tmate.hgkit.ll.DigestHelper;
-import com.tmate.hgkit.ll.HgDataFile;
-import com.tmate.hgkit.ll.HgRepository;
-import com.tmate.hgkit.ll.Internals;
+import org.tmatesoft.hg.internal.DigestHelper;
+import org.tmatesoft.hg.repo.HgDataFile;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.Internals;
+
 
 /**
  * @author Artem Tikhomirov
@@ -29,9 +29,8 @@
 public class Cat {
 
 	public static void main(String[] args) throws Exception {
-		RepositoryLookup repoLookup = new RepositoryLookup();
-		RepositoryLookup.Options cmdLineOpts = RepositoryLookup.Options.parse(args);
-		HgRepository hgRepo = repoLookup.detect(cmdLineOpts);
+		Options cmdLineOpts = Options.parse(args);
+		HgRepository hgRepo = cmdLineOpts.findRepository();
 		if (hgRepo.isInvalid()) {
 			System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation());
 			return;
diff -r 0d279bcc4442 -r 6f1b88693d48 cmdline/org/tmatesoft/hg/console/Incoming.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cmdline/org/tmatesoft/hg/console/Incoming.java	Mon Jan 24 03:14:45 2011 +0100
@@ -0,0 +1,200 @@
+/*
+ * Copyright (c) 2011 TMate Software Ltd
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@svnkit.com
+ */
+package org.tmatesoft.hg.console;
+
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.repo.Changelog;
+import org.tmatesoft.hg.repo.HgRepository;
+
+
+/**
+ * WORK IN PROGRESS, DO NOT USE
+ * hg in counterpart
+ *
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class Incoming {
+
+	public static void main(String[] args) throws Exception {
+		Options cmdLineOpts = Options.parse(args);
+		HgRepository hgRepo = cmdLineOpts.findRepository();
+		if (hgRepo.isInvalid()) {
+			System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation());
+			return;
+		}
+		// in fact, all we need from changelog is set of all nodeids. However, since ParentWalker reuses same Nodeids, it's not too expensive
+		// to reuse it here, XXX although later this may need to be refactored
+		final Changelog.ParentWalker pw = hgRepo.getChangelog().new ParentWalker();
+		pw.init();
+		//
+		HashSet<Nodeid> base = new HashSet<Nodeid>();
+		HashSet<Nodeid> unknownRemoteHeads = new HashSet<Nodeid>();
+		// imagine empty repository - any nodeid from remote heads would be unknown
+		unknownRemoteHeads.add(Nodeid.fromAscii("382cfe9463db0484a14136e4b38407419525f0c0".getBytes(), 0, 40));
+		//
+		LinkedList<RemoteBranch> remoteBranches = new LinkedList<RemoteBranch>();
+		remoteBranches(unknownRemoteHeads, remoteBranches);
+		//
+		HashSet<Nodeid> visited = new HashSet<Nodeid>();
+		HashSet<RemoteBranch> processed = new HashSet<RemoteBranch>();
+		LinkedList<Nodeid[]> toScan = new LinkedList<Nodeid[]>();
+		LinkedHashSet<Nodeid> toFetch = new LinkedHashSet<Nodeid>();
+		// next one seems to track heads we've asked (or plan to ask) remote.branches for
+		HashSet<Nodeid> unknownHeads /*req*/ = new HashSet<Nodeid>(unknownRemoteHeads);
+		while (!remoteBranches.isEmpty()) {
+			LinkedList<Nodeid> toQueryRemote = new LinkedList<Nodeid>();
+			while (!remoteBranches.isEmpty()) {
+				RemoteBranch next = remoteBranches.removeFirst();
+				if (visited.contains(next.head) || processed.contains(next)) {
+					continue;
+				}
+				if (Nodeid.NULL.equals(next.head)) {
+					// it's discovery.py that expects next.head to be nullid here, I can't imagine how this may happen, hence this exception
+					throw new IllegalStateException("I wonder if null if may ever get here with remote branches");
+				} else if (pw.knownNode(next.root)) {
+					// root of the remote change is known locally, analyze to find exact missing changesets
+					toScan.addLast(new Nodeid[] { next.head, next.root });
+					processed.add(next);
+				} else {
+					if (!visited.contains(next.root) && !toFetch.contains(next.root)) {
+						// if parents are locally known, this is new branch (sequence of changes) (sequence sprang out of known parents)
+						if ((next.p1 == null || pw.knownNode(next.p1)) && (next.p2 == null || pw.knownNode(next.p2))) {
+							toFetch.add(next.root);
+						}
+						// XXX perhaps, may combine this parent processing below (I don't understand what this code is exactly about)
+						if (pw.knownNode(next.p1)) {
+							base.add(next.p1);
+						}
+						if (pw.knownNode(next.p2)) {
+							base.add(next.p2);
+						}
+					}
+					if (next.p1 != null && !pw.knownNode(next.p1) && !unknownHeads.contains(next.p1)) {
+						toQueryRemote.add(next.p1);
+						unknownHeads.add(next.p1);
+					}
+					if (next.p2 != null && !pw.knownNode(next.p2) && !unknownHeads.contains(next.p2)) {
+						toQueryRemote.add(next.p2);
+						unknownHeads.add(next.p2);
+					}
+				}
+				visited.add(next.head);
+			}
+			if (!toQueryRemote.isEmpty()) {
+				// discovery.py in fact does this in batches of 10 revisions a time.
+				// however, this slicing may be done in remoteBranches call instead (if needed)
+				remoteBranches(toQueryRemote, remoteBranches);
+			}
+		}
+		while (!toScan.isEmpty()) {
+			Nodeid[] head_root = toScan.removeFirst();
+			List<Nodeid> nodesBetween = remoteBetween(head_root[0], head_root[1], new LinkedList<Nodeid>());
+			nodesBetween.add(head_root[1]);
+			int x = 1;
+			Nodeid p = head_root[0];
+			for (Nodeid i : nodesBetween) {
+				System.out.println("narrowing " + x + ":" + nodesBetween.size() + " " + i.shortNotation());
+				if (pw.knownNode(i)) {
+					if (x <= 2) {
+						toFetch.add(p);
+						base.add(i);
+					} else {
+						// XXX original discovery.py collects new elements to scan separately
+						// likely to "batch" calls to server
+						System.out.println("narrowed branch search to " + p.shortNotation() + ":" + i.shortNotation());
+						toScan.addLast(new Nodeid[] { p, i });
+					}
+					break;
+				}
+				x = x << 1;
+				p = i;
+			}
+		}
+		for (Nodeid n : toFetch) {
+			if (pw.knownNode(n)) {
+				System.out.println("Erroneous to fetch:" + n);
+			} else {
+				System.out.println(n);
+			}
+		}
+
+	}
+
+	static final class RemoteBranch {
+		public Nodeid head, root, p1, p2;
+
+		@Override
+		public boolean equals(Object obj) {
+			if (this == obj) {
+				return true;
+			}
+			if (false == obj instanceof RemoteBranch) {
+				return false;
+			}
+			RemoteBranch o = (RemoteBranch) obj;
+			return head.equals(o.head) && root.equals(o.root) && (p1 == null && o.p1 == null || p1.equals(o.p1)) && (p2 == null && o.p2 == null || p2.equals(o.p2));
+		}
+	}
+
+	private static void remoteBranches(Collection<Nodeid> unknownRemoteHeads, List<RemoteBranch> remoteBranches) {
+		// discovery.findcommonincoming:
+		// unknown = remote.branches(remote.heads);
+		// sent: cmd=branches&roots=d6d2a630f4a6d670c90a5ca909150f2b426ec88f+
+		// received: d6d2a630f4a6d670c90a5ca909150f2b426ec88f dbd663faec1f0175619cf7668bddc6350548b8d6 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
+		// head, root, first parent, second parent
+		//
+		// TODO implement this with remote access
+		//
+		RemoteBranch rb = new RemoteBranch();
+		rb.head = unknownRemoteHeads.iterator().next();
+		rb.root = Nodeid.fromAscii("dbd663faec1f0175619cf7668bddc6350548b8d6".getBytes(), 0, 40);
+		remoteBranches.add(rb);
+	}
+
+	private static List<Nodeid> remoteBetween(Nodeid nodeid1, Nodeid nodeid2, List<Nodeid> list) {
+		// sent: cmd=between&pairs=d6d2a630f4a6d670c90a5ca909150f2b426ec88f-dbd663faec1f0175619cf7668bddc6350548b8d6
+		// received: a78c980749e3ccebb47138b547e9b644a22797a9 286d221f6c28cbfce25ea314e1f46a23b7f979d3 fc265ddeab262ff5c34b4cf4e2522d8d41f1f05b a3576694a4d1edaa681cab15b89d6b556b02aff4
+		// 1st, 2nd, fourth and eights of total 8 changes between rev9 and rev0
+		//
+		//
+		// a78c980749e3ccebb47138b547e9b644a22797a9 286d221f6c28cbfce25ea314e1f46a23b7f979d3 fc265ddeab262ff5c34b4cf4e2522d8d41f1f05b a3576694a4d1edaa681cab15b89d6b556b02aff4
+		//d6d2a630f4a6d670c90a5ca909150f2b426ec88f a78c980749e3ccebb47138b547e9b644a22797a9 5abe5af181bd6a6d3e94c378376c901f0f80da50 08db726a0fb7914ac9d27ba26dc8bbf6385a0554
+
+		// TODO implement with remote access
+		String response = null;
+		if (nodeid1.equals(Nodeid.fromAscii("382cfe9463db0484a14136e4b38407419525f0c0".getBytes(), 0, 40)) && nodeid2.equals(Nodeid.fromAscii("dbd663faec1f0175619cf7668bddc6350548b8d6".getBytes(), 0, 40))) {
+			response = "d6d2a630f4a6d670c90a5ca909150f2b426ec88f a78c980749e3ccebb47138b547e9b644a22797a9 5abe5af181bd6a6d3e94c378376c901f0f80da50 08db726a0fb7914ac9d27ba26dc8bbf6385a0554";
+		} else if (nodeid1.equals(Nodeid.fromAscii("a78c980749e3ccebb47138b547e9b644a22797a9".getBytes(), 0, 40)) && nodeid2.equals(Nodeid.fromAscii("5abe5af181bd6a6d3e94c378376c901f0f80da50".getBytes(), 0, 40))) {
+			response = "286d221f6c28cbfce25ea314e1f46a23b7f979d3";
+		}
+		if (response == null) {
+			throw HgRepository.notImplemented();
+		}
+		for (String s : response.split(" ")) {
+			list.add(Nodeid.fromAscii(s.getBytes(), 0, 40));
+		}
+		return list;
+	}
+
+}
diff -r 0d279bcc4442 -r 6f1b88693d48 cmdline/org/tmatesoft/hg/console/Log.java
--- a/cmdline/org/tmatesoft/hg/console/Log.java	Sun Jan 23 04:06:18 2011 +0100
+++ b/cmdline/org/tmatesoft/hg/console/Log.java	Mon Jan 24 03:14:45 2011 +0100
@@ -23,13 +23,12 @@
 import org.tmatesoft.hg.core.Cset;
 import org.tmatesoft.hg.core.LogCommand;
 import org.tmatesoft.hg.core.LogCommand.FileRevision;
+import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.core.Path;
+import org.tmatesoft.hg.repo.Changelog;
+import org.tmatesoft.hg.repo.HgDataFile;
+import org.tmatesoft.hg.repo.HgRepository;
 
-import com.tmate.hgkit.fs.RepositoryLookup;
-import com.tmate.hgkit.ll.HgDataFile;
-import com.tmate.hgkit.ll.HgRepository;
-import com.tmate.hgkit.ll.Nodeid;
-import com.tmate.hgkit.ll.Revlog;
 
 /**
  * @author Artem Tikhomirov
@@ -38,9 +37,8 @@
 public class Log {
 
 	public static void main(String[] args) throws Exception {
-		RepositoryLookup repoLookup = new RepositoryLookup();
-		RepositoryLookup.Options cmdLineOpts = RepositoryLookup.Options.parse(args);
-		HgRepository hgRepo = repoLookup.detect(cmdLineOpts);
+		Options cmdLineOpts = Options.parse(args);
+		HgRepository hgRepo = cmdLineOpts.findRepository();
 		if (hgRepo.isInvalid()) {
 			System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation());
 			return;
@@ -124,7 +122,7 @@
 		// own
 		private LinkedList l = new LinkedList();
 		private final HgRepository repo;
-		private Revlog.ParentWalker changelogWalker;
+		private Changelog.ParentWalker changelogWalker;
 		private final int tip ;
 
 		public Dump(HgRepository hgRepo) {
diff -r 0d279bcc4442 -r 6f1b88693d48 cmdline/org/tmatesoft/hg/console/Manifest.java
--- a/cmdline/org/tmatesoft/hg/console/Manifest.java	Sun Jan 23 04:06:18 2011 +0100
+++ b/cmdline/org/tmatesoft/hg/console/Manifest.java	Mon Jan 24 03:14:45 2011 +0100
@@ -16,16 +16,15 @@
  */
 package org.tmatesoft.hg.console;
 
-import static com.tmate.hgkit.ll.HgRepository.TIP;
+import static org.tmatesoft.hg.repo.HgRepository.TIP;
+
+import org.tmatesoft.hg.core.LogCommand.FileRevision;
+import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.core.Path;
 import org.tmatesoft.hg.core.RepositoryTreeWalker;
-import org.tmatesoft.hg.core.LogCommand.FileRevision;
+import org.tmatesoft.hg.repo.HgManifest;
+import org.tmatesoft.hg.repo.HgRepository;
 
-import com.tmate.hgkit.fs.RepositoryLookup;
-import com.tmate.hgkit.ll.HgManifest;
-import com.tmate.hgkit.ll.HgRepository;
-import com.tmate.hgkit.ll.Nodeid;
 
 /**
  *
@@ -35,9 +34,8 @@
 public class Manifest {
 
 	public static void main(String[] args) throws Exception {
-		RepositoryLookup repoLookup = new RepositoryLookup();
-		RepositoryLookup.Options cmdLineOpts = RepositoryLookup.Options.parse(args);
-		HgRepository
hgRepo = repoLookup.detect(cmdLineOpts); + Options cmdLineOpts = Options.parse(args); + HgRepository hgRepo = cmdLineOpts.findRepository(); if (hgRepo.isInvalid()) { System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); return; diff -r 0d279bcc4442 -r 6f1b88693d48 cmdline/org/tmatesoft/hg/console/Options.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cmdline/org/tmatesoft/hg/console/Options.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,106 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.console; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.repo.Lookup; + +/** + * Parse command-line options + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +class Options { + + public String repoLocation; + public List files; + public int limit = -1; + public Set users; + public Set branches; + + public HgRepository findRepository() throws Exception { + if (repoLocation != null) { + return new Lookup().detect(repoLocation); + } + return new Lookup().detectFromWorkingDir(); + } + + + public static Options parse(String[] commandLineArgs) { + Options rv = new Options(); + List args = Arrays.asList(commandLineArgs); + LinkedList files = new LinkedList(); + for (Iterator it = args.iterator(); it.hasNext(); ) { + String arg = it.next(); + if (arg.charAt(0) == '-') { + // option + if (arg.length() == 1) { + throw new IllegalArgumentException("Bad option: -"); + } + switch ((int) arg.charAt(1)) { + case (int) 'R' : { + if (! 
it.hasNext()) { + throw new IllegalArgumentException("Need repo location"); + } + rv.repoLocation = it.next(); + break; + } + case (int) 'l' : { + if (!it.hasNext()) { + throw new IllegalArgumentException(); + } + rv.limit = Integer.parseInt(it.next()); + break; + } + case (int) 'u' : { + if (rv.users == null) { + rv.users = new LinkedHashSet(); + } + rv.users.add(it.next()); + break; + } + case (int) 'b' : { + if (rv.branches == null) { + rv.branches = new LinkedHashSet(); + } + rv.branches.add(it.next()); + break; + } + } + } else { + // filename + files.add(arg); + } + } + if (!files.isEmpty()) { + rv.files = new ArrayList(files); + } else { + rv.files = Collections.emptyList(); + } + return rv; + } +} \ No newline at end of file diff -r 0d279bcc4442 -r 6f1b88693d48 cmdline/org/tmatesoft/hg/console/Outgoing.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cmdline/org/tmatesoft/hg/console/Outgoing.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,89 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.console; + +import java.util.Collection; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; + +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.repo.Changelog; +import org.tmatesoft.hg.repo.HgRepository; + + +/** + * WORK IN PROGRESS, DO NOT USE + * hg out + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class Outgoing { + + public static void main(String[] args) throws Exception { + Options cmdLineOpts = Options.parse(args); + HgRepository hgRepo = cmdLineOpts.findRepository(); + if (hgRepo.isInvalid()) { + System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); + return; + } + // FIXME detection of + List base = new LinkedList(); + base.add(Nodeid.fromAscii("d6d2a630f4a6d670c90a5ca909150f2b426ec88f".getBytes(), 0, 40)); + // + // fill with all known + Changelog.ParentWalker pw = hgRepo.getChangelog().new ParentWalker(); + pw.init(); + LinkedHashSet sendToRemote = new LinkedHashSet(pw.allNodes()); + dump("initial state", sendToRemote); + // remove base and its parents + LinkedList queueToClean = new LinkedList(base); + while (!queueToClean.isEmpty()) { + Nodeid nid = queueToClean.removeFirst(); + if (sendToRemote.remove(nid)) { + pw.appendParentsOf(nid, queueToClean); + } + } + dump("Clean from known parents", sendToRemote); + // XXX I think sendToRemote is what we actually need here - everything local, missing from remote + // however, if we need to send only a subset of these, need to proceed. 
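The loop that follows keeps only those candidates whose parents all lie outside the candidate set, i.e. the roots of the outgoing subset. A standalone sketch of the same idea over plain collections (hex strings stand in for Nodeid, and the parent map is a hypothetical substitute for ParentWalker, not the library API):

	import java.util.ArrayList;
	import java.util.List;
	import java.util.Map;
	import java.util.Set;

	class OutgoingRootsSketch {
		// candidates: everything local that the remote lacks; parents: child id -> parent ids
		static List<String> roots(Set<String> candidates, Map<String, List<String>> parents) {
			List<String> result = new ArrayList<String>();
			for (String n : candidates) {
				boolean parentInCandidates = false;
				List<String> ps = parents.get(n);
				if (ps != null) {
					for (String p : ps) {
						if (candidates.contains(p)) {
							parentInCandidates = true;
							break;
						}
					}
				}
				if (!parentInCandidates) {
					result.add(n); // none of its parents is being sent, so this node starts a branch to send
				}
			}
			return result;
		}
	}
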
+ LinkedList result = new LinkedList(); + // find among left those without parents + for (Nodeid nid : sendToRemote) { + Nodeid p1 = pw.firstParent(nid); + // in fact, we may assume nulls are never part of sendToRemote + if (p1 != null && !sendToRemote.contains(p1)) { + Nodeid p2 = pw.secondParent(nid); + if (p2 == null || !sendToRemote.contains(p2)) { + result.add(nid); + } + } + } + dump("Result", result); + // final outcome is the collection of nodes between(lastresult and revision/tip) + // + System.out.println("TODO: nodes between result and tip"); + } + + private static void dump(String s, Collection c) { + System.out.println(s); + for (Nodeid n : c) { + System.out.println(n); + } + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 cmdline/org/tmatesoft/hg/console/Remote.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cmdline/org/tmatesoft/hg/console/Remote.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,113 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.console; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.InputStream; +import java.net.URL; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; +import java.util.List; +import java.util.Map; +import java.util.prefs.Preferences; +import java.util.zip.InflaterInputStream; + +import javax.net.ssl.HttpsURLConnection; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; + +import org.tmatesoft.hg.internal.ConfigFile; + +/** + * WORK IN PROGRESS, DO NOT USE + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class Remote { + + /* + * @see http://mercurial.selenic.com/wiki/WireProtocol + cmd=branches gives 4 nodeids (head, root, first parent, second parent) per line (few lines possible, per branch, perhaps?) 
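As the note above says, every line of a cmd=branches response carries four 40-character hex nodeids: head, root, first parent, second parent. A minimal parsing sketch under that assumption (class and field names here are illustrative, not part of the library):

	class BranchesLineSketch {
		String head, root, p1, p2;

		// parse one response line, e.g. "d6d2a630... dbd663fa... 00000000... 00000000..."
		static BranchesLineSketch parse(String line) {
			String[] f = line.trim().split(" ");
			if (f.length != 4) {
				throw new IllegalArgumentException("expected 4 nodeids, got " + f.length);
			}
			BranchesLineSketch rb = new BranchesLineSketch();
			rb.head = f[0];
			rb.root = f[1];
			rb.p1 = f[2];
			rb.p2 = f[3];
			return rb;
		}
	}
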
+ cmd=capabilities gives lookup ...subset and 3 compress methods + // lookup changegroupsubset unbundle=HG10GZ,HG10BZ,HG10UN + cmd=heads gives space-separated list of nodeids (or just one) + nodeids are in hex (printable) format, need to convert fromAscii() + cmd=branchmap + */ + public static void main(String[] args) throws Exception { + String nid = "d6d2a630f4a6d670c90a5ca909150f2b426ec88f"; + ConfigFile cfg = new ConfigFile(); + cfg.addLocation(new File(System.getProperty("user.home"), ".hgrc")); + String svnkitServer = cfg.getSection("paths").get("svnkit"); + URL url = new URL(svnkitServer + "?cmd=changegroup&roots=a78c980749e3ccebb47138b547e9b644a22797a9"); + + SSLContext sslContext = SSLContext.getInstance("SSL"); + class TrustEveryone implements X509TrustManager { + public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException { + System.out.println("checkClientTrusted " + authType); + } + public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException { + System.out.println("checkServerTrusted" + authType); + } + public X509Certificate[] getAcceptedIssuers() { + return new X509Certificate[0]; + } + } + // + Preferences tempNode = Preferences.userRoot().node("xxx"); + tempNode.putByteArray("xxx", url.getUserInfo().getBytes()); + String authInfo = tempNode.get("xxx", null); + tempNode.removeNode(); + // + sslContext.init(null, new TrustManager[] { new TrustEveryone() }, null); + HttpsURLConnection urlConnection = (HttpsURLConnection) url.openConnection(); + urlConnection.addRequestProperty("User-Agent", "jhg/0.1.0"); + urlConnection.addRequestProperty("Accept", "application/mercurial-0.1"); + urlConnection.addRequestProperty("Authorization", "Basic " + authInfo); + urlConnection.setSSLSocketFactory(sslContext.getSocketFactory()); + urlConnection.connect(); + System.out.println("Response headers:"); + final Map> headerFields = urlConnection.getHeaderFields(); + for (String s : headerFields.keySet()) { + System.out.printf("%s: %s\n", s, urlConnection.getHeaderField(s)); + } + System.out.printf("Content type is %s and its length is %d\n", urlConnection.getContentType(), urlConnection.getContentLength()); + InputStream is = urlConnection.getInputStream(); +// int b; +// while ((b =is.read()) != -1) { +// System.out.print((char) b); +// } +// System.out.println(); + InflaterInputStream zipStream = new InflaterInputStream(is); + File tf = File.createTempFile("hg-bundle-", null); + FileOutputStream fos = new FileOutputStream(tf); + int r; + byte[] buf = new byte[8*1024]; + while ((r = zipStream.read(buf)) != -1) { + fos.write(buf, 0, r); + } + fos.close(); + zipStream.close(); + System.out.println(tf); + + urlConnection.disconnect(); + // + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 cmdline/org/tmatesoft/hg/console/Status.java --- a/cmdline/org/tmatesoft/hg/console/Status.java Sun Jan 23 04:06:18 2011 +0100 +++ b/cmdline/org/tmatesoft/hg/console/Status.java Mon Jan 24 03:14:45 2011 +0100 @@ -16,20 +16,20 @@ */ package org.tmatesoft.hg.console; -import static com.tmate.hgkit.ll.HgRepository.TIP; +import static org.tmatesoft.hg.repo.HgRepository.TIP; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Map; -import com.tmate.hgkit.fs.RepositoryLookup; -import com.tmate.hgkit.ll.HgDataFile; -import com.tmate.hgkit.ll.HgRepository; -import com.tmate.hgkit.ll.Internals; -import com.tmate.hgkit.ll.LocalHgRepo; -import com.tmate.hgkit.ll.Nodeid; -import 
com.tmate.hgkit.ll.StatusCollector; -import com.tmate.hgkit.ll.WorkingCopyStatusCollector; +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.core.Path; +import org.tmatesoft.hg.repo.HgDataFile; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.repo.Internals; +import org.tmatesoft.hg.repo.StatusCollector; +import org.tmatesoft.hg.repo.WorkingCopyStatusCollector; /** * @@ -39,14 +39,35 @@ public class Status { public static void main(String[] args) throws Exception { - RepositoryLookup repoLookup = new RepositoryLookup(); - RepositoryLookup.Options cmdLineOpts = RepositoryLookup.Options.parse(args); - HgRepository hgRepo = repoLookup.detect(cmdLineOpts); + Options cmdLineOpts = Options.parse(args); + HgRepository hgRepo = cmdLineOpts.findRepository(); if (hgRepo.isInvalid()) { System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); return; } System.out.println(hgRepo.getLocation()); + // +// bunchOfTests(hgRepo); + // +// new Internals(hgRepo).dumpDirstate(); + // + mardu(hgRepo); + } + + private static void mardu(HgRepository hgRepo) { + WorkingCopyStatusCollector wcc = new WorkingCopyStatusCollector(hgRepo); + StatusCollector.Record r = new StatusCollector.Record(); + wcc.walk(TIP, r); + sortAndPrint('M', r.getModified()); + sortAndPrint('A', r.getAdded(), r.getCopied()); + sortAndPrint('R', r.getRemoved()); + sortAndPrint('?', r.getUnknown()); +// sortAndPrint('I', r.getIgnored()); +// sortAndPrint('C', r.getClean()); + sortAndPrint('!', r.getMissing()); + } + + private static void bunchOfTests(HgRepository hgRepo) throws Exception { Internals debug = new Internals(hgRepo); debug.dumpDirstate(); final StatusDump dump = new StatusDump(); @@ -66,7 +87,7 @@ System.out.println("\n\nTry hg status --change :"); sc.change(0, dump); System.out.println("\nStatus against working dir:"); - WorkingCopyStatusCollector wcc = new WorkingCopyStatusCollector(hgRepo, ((LocalHgRepo) hgRepo).createWorkingDirWalker()); + WorkingCopyStatusCollector wcc = new WorkingCopyStatusCollector(hgRepo); wcc.walk(TIP, dump); System.out.println(); System.out.printf("Manifest of the revision %d:\n", r2); @@ -75,7 +96,7 @@ System.out.printf("\nStatus of working dir against %d:\n", r2); r = wcc.status(r2); sortAndPrint('M', r.getModified()); - sortAndPrint('A', r.getAdded()); + sortAndPrint('A', r.getAdded(), r.getCopied()); sortAndPrint('R', r.getRemoved()); sortAndPrint('?', r.getUnknown()); sortAndPrint('I', r.getIgnored()); @@ -84,17 +105,23 @@ } private static void sortAndPrint(char prefix, List ul) { + sortAndPrint(prefix, ul, null); + } + private static void sortAndPrint(char prefix, List ul, Map copies) { ArrayList sortList = new ArrayList(ul); Collections.sort(sortList); for (String s : sortList) { System.out.print(prefix); System.out.print(' '); System.out.println(s); + if (copies != null && copies.containsKey(s)) { + System.out.println(" " + copies.get(s)); + } } } protected static void testStatusInternals(HgRepository hgRepo) { - HgDataFile n = hgRepo.getFileNode("design.txt"); + HgDataFile n = hgRepo.getFileNode(Path.create("design.txt")); for (String s : new String[] {"011dfd44417c72bd9e54cf89b82828f661b700ed", "e5529faa06d53e06a816e56d218115b42782f1ba", "c18e7111f1fc89a80a00f6a39d51288289a382fc"}) { // expected: 359, 2123, 3079 byte[] b = s.getBytes(); diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/console/Bundle.java --- a/src/com/tmate/hgkit/console/Bundle.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,33 
+0,0 @@ -/* - * Copyright (c) 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.console; - -import java.io.File; - -import com.tmate.hgkit.fs.DataAccessProvider; -import com.tmate.hgkit.fs.RepositoryLookup; -import com.tmate.hgkit.ll.HgBundle; -import com.tmate.hgkit.ll.HgRepository; - -/** - * - * @author artem - */ -public class Bundle { - - public static void main(String[] args) throws Exception { - RepositoryLookup repoLookup = new RepositoryLookup(); - RepositoryLookup.Options cmdLineOpts = RepositoryLookup.Options.parse(args); - HgRepository hgRepo = repoLookup.detect(cmdLineOpts); - if (hgRepo.isInvalid()) { - System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); - return; - } - File bundleFile = new File("/temp/hg/hg-bundle-a78c980749e3.tmp"); - DataAccessProvider dap = new DataAccessProvider(); - HgBundle hgBundle = new HgBundle(dap, bundleFile); -// hgBundle.dump(); - hgBundle.changes(hgRepo); - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/console/Incoming.java --- a/src/com/tmate/hgkit/console/Incoming.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,185 +0,0 @@ -/* - * Copyright (c) 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.console; - -import java.util.Collection; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.LinkedList; -import java.util.List; - -import com.tmate.hgkit.fs.RepositoryLookup; -import com.tmate.hgkit.ll.HgRepository; -import com.tmate.hgkit.ll.Nodeid; -import com.tmate.hgkit.ll.Revlog; - -/** - * - * @author artem - */ -public class Incoming { - - public static void main(String[] args) throws Exception { - RepositoryLookup repoLookup = new RepositoryLookup(); - RepositoryLookup.Options cmdLineOpts = RepositoryLookup.Options.parse(args); - HgRepository hgRepo = repoLookup.detect(cmdLineOpts); - if (hgRepo.isInvalid()) { - System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); - return; - } - // in fact, all we need from changelog is set of all nodeids. 
However, since ParentWalker reuses same Nodeids, it's not too expensive - // to reuse it here, XXX although later this may need to be refactored - final Revlog.ParentWalker pw = hgRepo.getChangelog().new ParentWalker(); - pw.init(); - // - HashSet base = new HashSet(); - HashSet unknownRemoteHeads = new HashSet(); - // imagine empty repository - any nodeid from remote heads would be unknown - unknownRemoteHeads.add(Nodeid.fromAscii("382cfe9463db0484a14136e4b38407419525f0c0".getBytes(), 0, 40)); - // - LinkedList remoteBranches = new LinkedList(); - remoteBranches(unknownRemoteHeads, remoteBranches); - // - HashSet visited = new HashSet(); - HashSet processed = new HashSet(); - LinkedList toScan = new LinkedList(); - LinkedHashSet toFetch = new LinkedHashSet(); - // next one seems to track heads we've asked (or plan to ask) remote.branches for - HashSet unknownHeads /*req*/ = new HashSet(unknownRemoteHeads); - while (!remoteBranches.isEmpty()) { - LinkedList toQueryRemote = new LinkedList(); - while (!remoteBranches.isEmpty()) { - RemoteBranch next = remoteBranches.removeFirst(); - if (visited.contains(next.head) || processed.contains(next)) { - continue; - } - if (Nodeid.NULL.equals(next.head)) { - // it's discovery.py that expects next.head to be nullid here, I can't imagine how this may happen, hence this exception - throw new IllegalStateException("I wonder if null if may ever get here with remote branches"); - } else if (pw.knownNode(next.root)) { - // root of the remote change is known locally, analyze to find exact missing changesets - toScan.addLast(new Nodeid[] { next.head, next.root }); - processed.add(next); - } else { - if (!visited.contains(next.root) && !toFetch.contains(next.root)) { - // if parents are locally known, this is new branch (sequence of changes) (sequence sprang out of known parents) - if ((next.p1 == null || pw.knownNode(next.p1)) && (next.p2 == null || pw.knownNode(next.p2))) { - toFetch.add(next.root); - } - // XXX perhaps, may combine this parent processing below (I don't understand what this code is exactly about) - if (pw.knownNode(next.p1)) { - base.add(next.p1); - } - if (pw.knownNode(next.p2)) { - base.add(next.p2); - } - } - if (next.p1 != null && !pw.knownNode(next.p1) && !unknownHeads.contains(next.p1)) { - toQueryRemote.add(next.p1); - unknownHeads.add(next.p1); - } - if (next.p2 != null && !pw.knownNode(next.p2) && !unknownHeads.contains(next.p2)) { - toQueryRemote.add(next.p2); - unknownHeads.add(next.p2); - } - } - visited.add(next.head); - } - if (!toQueryRemote.isEmpty()) { - // discovery.py in fact does this in batches of 10 revisions a time. 
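The comment above mentions that discovery.py asks remote.branches in batches of ten revisions; a small generic slicing helper of that kind might look like this (an illustrative sketch, not part of the library):

	import java.util.ArrayList;
	import java.util.List;

	class BatchSketch {
		// split input into consecutive chunks of at most batchSize elements, preserving order
		static <T> List<List<T>> batches(List<T> input, int batchSize) {
			List<List<T>> out = new ArrayList<List<T>>();
			for (int i = 0; i < input.size(); i += batchSize) {
				int end = Math.min(i + batchSize, input.size());
				out.add(new ArrayList<T>(input.subList(i, end)));
			}
			return out;
		}
	}
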
- // however, this slicing may be done in remoteBranches call instead (if needed) - remoteBranches(toQueryRemote, remoteBranches); - } - } - while (!toScan.isEmpty()) { - Nodeid[] head_root = toScan.removeFirst(); - List nodesBetween = remoteBetween(head_root[0], head_root[1], new LinkedList()); - nodesBetween.add(head_root[1]); - int x = 1; - Nodeid p = head_root[0]; - for (Nodeid i : nodesBetween) { - System.out.println("narrowing " + x + ":" + nodesBetween.size() + " " + i.shortNotation()); - if (pw.knownNode(i)) { - if (x <= 2) { - toFetch.add(p); - base.add(i); - } else { - // XXX original discovery.py collects new elements to scan separately - // likely to "batch" calls to server - System.out.println("narrowed branch search to " + p.shortNotation() + ":" + i.shortNotation()); - toScan.addLast(new Nodeid[] { p, i }); - } - break; - } - x = x << 1; - p = i; - } - } - for (Nodeid n : toFetch) { - if (pw.knownNode(n)) { - System.out.println("Erroneous to fetch:" + n); - } else { - System.out.println(n); - } - } - - } - - static final class RemoteBranch { - public Nodeid head, root, p1, p2; - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (false == obj instanceof RemoteBranch) { - return false; - } - RemoteBranch o = (RemoteBranch) obj; - return head.equals(o.head) && root.equals(o.root) && (p1 == null && o.p1 == null || p1.equals(o.p1)) && (p2 == null && o.p2 == null || p2.equals(o.p2)); - } - } - - private static void remoteBranches(Collection unknownRemoteHeads, List remoteBranches) { - // discovery.findcommonincoming: - // unknown = remote.branches(remote.heads); - // sent: cmd=branches&roots=d6d2a630f4a6d670c90a5ca909150f2b426ec88f+ - // received: d6d2a630f4a6d670c90a5ca909150f2b426ec88f dbd663faec1f0175619cf7668bddc6350548b8d6 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 - // head, root, first parent, second parent - // - // TODO implement this with remote access - // - RemoteBranch rb = new RemoteBranch(); - rb.head = unknownRemoteHeads.iterator().next(); - rb.root = Nodeid.fromAscii("dbd663faec1f0175619cf7668bddc6350548b8d6".getBytes(), 0, 40); - remoteBranches.add(rb); - } - - private static List remoteBetween(Nodeid nodeid1, Nodeid nodeid2, List list) { - // sent: cmd=between&pairs=d6d2a630f4a6d670c90a5ca909150f2b426ec88f-dbd663faec1f0175619cf7668bddc6350548b8d6 - // received: a78c980749e3ccebb47138b547e9b644a22797a9 286d221f6c28cbfce25ea314e1f46a23b7f979d3 fc265ddeab262ff5c34b4cf4e2522d8d41f1f05b a3576694a4d1edaa681cab15b89d6b556b02aff4 - // 1st, 2nd, fourth and eights of total 8 changes between rev9 and rev0 - // - // - // a78c980749e3ccebb47138b547e9b644a22797a9 286d221f6c28cbfce25ea314e1f46a23b7f979d3 fc265ddeab262ff5c34b4cf4e2522d8d41f1f05b a3576694a4d1edaa681cab15b89d6b556b02aff4 - //d6d2a630f4a6d670c90a5ca909150f2b426ec88f a78c980749e3ccebb47138b547e9b644a22797a9 5abe5af181bd6a6d3e94c378376c901f0f80da50 08db726a0fb7914ac9d27ba26dc8bbf6385a0554 - - // TODO implement with remote access - String response = null; - if (nodeid1.equals(Nodeid.fromAscii("382cfe9463db0484a14136e4b38407419525f0c0".getBytes(), 0, 40)) && nodeid2.equals(Nodeid.fromAscii("dbd663faec1f0175619cf7668bddc6350548b8d6".getBytes(), 0, 40))) { - response = "d6d2a630f4a6d670c90a5ca909150f2b426ec88f a78c980749e3ccebb47138b547e9b644a22797a9 5abe5af181bd6a6d3e94c378376c901f0f80da50 08db726a0fb7914ac9d27ba26dc8bbf6385a0554"; - } else if (nodeid1.equals(Nodeid.fromAscii("a78c980749e3ccebb47138b547e9b644a22797a9".getBytes(), 0, 
40)) && nodeid2.equals(Nodeid.fromAscii("5abe5af181bd6a6d3e94c378376c901f0f80da50".getBytes(), 0, 40))) { - response = "286d221f6c28cbfce25ea314e1f46a23b7f979d3"; - } - if (response == null) { - throw HgRepository.notImplemented(); - } - for (String s : response.split(" ")) { - list.add(Nodeid.fromAscii(s.getBytes(), 0, 40)); - } - return list; - } - -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/console/Outgoing.java --- a/src/com/tmate/hgkit/console/Outgoing.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,74 +0,0 @@ -/* - * Copyright (c) 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.console; - -import java.util.Collection; -import java.util.LinkedHashSet; -import java.util.LinkedList; -import java.util.List; - -import com.tmate.hgkit.fs.RepositoryLookup; -import com.tmate.hgkit.ll.HgRepository; -import com.tmate.hgkit.ll.Nodeid; -import com.tmate.hgkit.ll.Revlog; - -/** - * hg out - * @author artem - */ -public class Outgoing { - - public static void main(String[] args) throws Exception { - RepositoryLookup repoLookup = new RepositoryLookup(); - RepositoryLookup.Options cmdLineOpts = RepositoryLookup.Options.parse(args); - HgRepository hgRepo = repoLookup.detect(cmdLineOpts); - if (hgRepo.isInvalid()) { - System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); - return; - } - // FIXME detection of - List base = new LinkedList(); - base.add(Nodeid.fromAscii("d6d2a630f4a6d670c90a5ca909150f2b426ec88f".getBytes(), 0, 40)); - // - // fill with all known - Revlog.ParentWalker pw = hgRepo.getChangelog().new ParentWalker(); - pw.init(); - LinkedHashSet sendToRemote = new LinkedHashSet(pw.allNodes()); - dump("initial state", sendToRemote); - // remove base and its parents - LinkedList queueToClean = new LinkedList(base); - while (!queueToClean.isEmpty()) { - Nodeid nid = queueToClean.removeFirst(); - if (sendToRemote.remove(nid)) { - pw.appendParentsOf(nid, queueToClean); - } - } - dump("Clean from known parents", sendToRemote); - // XXX I think sendToRemote is what we actually need here - everything local, missing from remote - // however, if we need to send only a subset of these, need to proceed. 
- LinkedList result = new LinkedList(); - // find among left those without parents - for (Nodeid nid : sendToRemote) { - Nodeid p1 = pw.firstParent(nid); - // in fact, we may assume nulls are never part of sendToRemote - if (p1 != null && !sendToRemote.contains(p1)) { - Nodeid p2 = pw.secondParent(nid); - if (p2 == null || !sendToRemote.contains(p2)) { - result.add(nid); - } - } - } - dump("Result", result); - // final outcome is the collection of nodes between(lastresult and revision/tip) - // - System.out.println("TODO: nodes between result and tip"); - } - - private static void dump(String s, Collection c) { - System.out.println(s); - for (Nodeid n : c) { - System.out.println(n); - } - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/console/Remote.java --- a/src/com/tmate/hgkit/console/Remote.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,98 +0,0 @@ -/* - * Copyright (c) 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.console; - -import java.io.File; -import java.io.FileOutputStream; -import java.io.InputStream; -import java.net.URL; -import java.security.cert.CertificateException; -import java.security.cert.X509Certificate; -import java.util.List; -import java.util.Map; -import java.util.prefs.Preferences; -import java.util.zip.InflaterInputStream; - -import javax.net.ssl.HttpsURLConnection; -import javax.net.ssl.SSLContext; -import javax.net.ssl.TrustManager; -import javax.net.ssl.X509TrustManager; - -import org.tmatesoft.hg.internal.ConfigFile; - -/** - * - * @author artem - */ -public class Remote { - - /* - * @see http://mercurial.selenic.com/wiki/WireProtocol - cmd=branches gives 4 nodeids (head, root, first parent, second parent) per line (few lines possible, per branch, perhaps?) - cmd=capabilities gives lookup ...subset and 3 compress methods - // lookup changegroupsubset unbundle=HG10GZ,HG10BZ,HG10UN - cmd=heads gives space-separated list of nodeids (or just one) - nodeids are in hex (printable) format, need to convert fromAscii() - cmd=branchmap - */ - public static void main(String[] args) throws Exception { - String nid = "d6d2a630f4a6d670c90a5ca909150f2b426ec88f"; - ConfigFile cfg = new ConfigFile(); - cfg.addLocation(new File(System.getProperty("user.home"), ".hgrc")); - String svnkitServer = cfg.getSection("paths").get("svnkit"); - URL url = new URL(svnkitServer + "?cmd=changegroup&roots=a78c980749e3ccebb47138b547e9b644a22797a9"); - - SSLContext sslContext = SSLContext.getInstance("SSL"); - class TrustEveryone implements X509TrustManager { - public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException { - System.out.println("checkClientTrusted " + authType); - } - public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException { - System.out.println("checkServerTrusted" + authType); - } - public X509Certificate[] getAcceptedIssuers() { - return new X509Certificate[0]; - } - } - // - Preferences tempNode = Preferences.userRoot().node("xxx"); - tempNode.putByteArray("xxx", url.getUserInfo().getBytes()); - String authInfo = tempNode.get("xxx", null); - tempNode.removeNode(); - // - sslContext.init(null, new TrustManager[] { new TrustEveryone() }, null); - HttpsURLConnection urlConnection = (HttpsURLConnection) url.openConnection(); - urlConnection.addRequestProperty("User-Agent", "jhg/0.1.0"); - urlConnection.addRequestProperty("Accept", "application/mercurial-0.1"); - urlConnection.addRequestProperty("Authorization", "Basic " + authInfo); - 
urlConnection.setSSLSocketFactory(sslContext.getSocketFactory()); - urlConnection.connect(); - System.out.println("Response headers:"); - final Map> headerFields = urlConnection.getHeaderFields(); - for (String s : headerFields.keySet()) { - System.out.printf("%s: %s\n", s, urlConnection.getHeaderField(s)); - } - System.out.printf("Content type is %s and its length is %d\n", urlConnection.getContentType(), urlConnection.getContentLength()); - InputStream is = urlConnection.getInputStream(); -// int b; -// while ((b =is.read()) != -1) { -// System.out.print((char) b); -// } -// System.out.println(); - InflaterInputStream zipStream = new InflaterInputStream(is); - File tf = File.createTempFile("hg-bundle-", null); - FileOutputStream fos = new FileOutputStream(tf); - int r; - byte[] buf = new byte[8*1024]; - while ((r = zipStream.read(buf)) != -1) { - fos.write(buf, 0, r); - } - fos.close(); - zipStream.close(); - System.out.println(tf); - - urlConnection.disconnect(); - // - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/fs/DataAccess.java --- a/src/com/tmate/hgkit/fs/DataAccess.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2010 Artem Tikhomirov - */ -package com.tmate.hgkit.fs; - -import java.io.IOException; - -/** - * relevant parts of DataInput, non-stream nature (seek operation), explicit check for end of data. - * convenient skip (+/- bytes) - * Primary goal - effective file read, so that clients don't need to care whether to call few - * distinct getInt() or readBytes(totalForFewInts) and parse themselves instead in an attempt to optimize. - */ -public class DataAccess { - public boolean isEmpty() { - return true; - } - // absolute positioning - public void seek(long offset) throws IOException { - throw new UnsupportedOperationException(); - } - // relative positioning - public void skip(int bytes) throws IOException { - throw new UnsupportedOperationException(); - } - // shall be called once this object no longer needed - public void done() { - // no-op in this empty implementation - } - public int readInt() throws IOException { - byte[] b = new byte[4]; - readBytes(b, 0, 4); - return b[0] << 24 | (b[1] & 0xFF) << 16 | (b[2] & 0xFF) << 8 | (b[3] & 0xFF); - } - public long readLong() throws IOException { - byte[] b = new byte[8]; - readBytes(b, 0, 8); - int i1 = b[0] << 24 | (b[1] & 0xFF) << 16 | (b[2] & 0xFF) << 8 | (b[3] & 0xFF); - int i2 = b[4] << 24 | (b[5] & 0xFF) << 16 | (b[6] & 0xFF) << 8 | (b[7] & 0xFF); - return ((long) i1) << 32 | ((long) i2 & 0xFFFFFFFF); - } - public void readBytes(byte[] buf, int offset, int length) throws IOException { - throw new UnsupportedOperationException(); - } - public byte readByte() throws IOException { - throw new UnsupportedOperationException(); - } -} \ No newline at end of file diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/fs/DataAccessProvider.java --- a/src/com/tmate/hgkit/fs/DataAccessProvider.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,265 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.fs; - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.MappedByteBuffer; -import java.nio.channels.FileChannel; - -/** - * - * @author artem - */ -public class DataAccessProvider { - - private final int mapioMagicBoundary; - private final int bufferSize; - - public DataAccessProvider() { - 
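DataAccessProvider defaults to a 100 KB memory-map boundary and an 8 KB read buffer; its create() method below memory-maps files above the boundary and falls back to plain buffered FileChannel reads otherwise. A standalone sketch of that size-based choice (names are illustrative):

	import java.io.File;
	import java.io.FileInputStream;
	import java.io.IOException;
	import java.nio.MappedByteBuffer;
	import java.nio.channels.FileChannel;

	class AccessChoiceSketch {
		static final int MAPIO_BOUNDARY = 100 * 1024; // same default as the constructor above

		// map large files; return null to tell the caller to use small buffered reads instead
		static MappedByteBuffer mapIfLarge(File f) throws IOException {
			FileChannel fc = new FileInputStream(f).getChannel();
			try {
				if (fc.size() > MAPIO_BOUNDARY) {
					// the mapping stays valid after the channel is closed
					return fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
				}
				return null;
			} finally {
				fc.close();
			}
		}
	}
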
this(100 * 1024, 8 * 1024); - } - - public DataAccessProvider(int mapioBoundary, int regularBufferSize) { - mapioMagicBoundary = mapioBoundary; - bufferSize = regularBufferSize; - } - - public DataAccess create(File f) { - if (!f.exists()) { - return new DataAccess(); - } - try { - FileChannel fc = new FileInputStream(f).getChannel(); - if (fc.size() > mapioMagicBoundary) { - // TESTS: bufLen of 1024 was used to test MemMapFileAccess - return new MemoryMapFileAccess(fc, fc.size(), mapioMagicBoundary); - } else { - // XXX once implementation is more or less stable, - // may want to try ByteBuffer.allocateDirect() to see - // if there's any performance gain. - boolean useDirectBuffer = false; - // TESTS: bufferSize of 100 was used to check buffer underflow states when readBytes reads chunks bigger than bufSize - return new FileAccess(fc, fc.size(), bufferSize, useDirectBuffer); - } - } catch (IOException ex) { - // unlikely to happen, we've made sure file exists. - ex.printStackTrace(); // FIXME log error - } - return new DataAccess(); // non-null, empty. - } - - // DOESN'T WORK YET - private static class MemoryMapFileAccess extends DataAccess { - private FileChannel fileChannel; - private final long size; - private long position = 0; // always points to buffer's absolute position in the file - private final int memBufferSize; - private MappedByteBuffer buffer; - - public MemoryMapFileAccess(FileChannel fc, long channelSize, int /*long?*/ bufferSize) { - fileChannel = fc; - size = channelSize; - memBufferSize = bufferSize; - } - - @Override - public boolean isEmpty() { - return position + (buffer == null ? 0 : buffer.position()) >= size; - } - - @Override - public void seek(long offset) { - assert offset >= 0; - // offset may not necessarily be further than current position in the file (e.g. rewind) - if (buffer != null && /*offset is within buffer*/ offset >= position && (offset - position) < buffer.limit()) { - buffer.position((int) (offset - position)); - } else { - position = offset; - buffer = null; - } - } - - @Override - public void skip(int bytes) throws IOException { - assert bytes >= 0; - if (buffer == null) { - position += bytes; - return; - } - if (buffer.remaining() > bytes) { - buffer.position(buffer.position() + bytes); - } else { - position += buffer.position() + bytes; - buffer = null; - } - } - - private void fill() throws IOException { - if (buffer != null) { - position += buffer.position(); - } - long left = size - position; - buffer = fileChannel.map(FileChannel.MapMode.READ_ONLY, position, left < memBufferSize ? 
left : memBufferSize); - } - - @Override - public void readBytes(byte[] buf, int offset, int length) throws IOException { - if (buffer == null || !buffer.hasRemaining()) { - fill(); - } - // XXX in fact, we may try to create a MappedByteBuffer of exactly length size here, and read right away - while (length > 0) { - int tail = buffer.remaining(); - if (tail == 0) { - throw new IOException(); - } - if (tail >= length) { - buffer.get(buf, offset, length); - } else { - buffer.get(buf, offset, tail); - fill(); - } - offset += tail; - length -= tail; - } - } - - @Override - public byte readByte() throws IOException { - if (buffer == null || !buffer.hasRemaining()) { - fill(); - } - if (buffer.hasRemaining()) { - return buffer.get(); - } - throw new IOException(); - } - - @Override - public void done() { - buffer = null; - if (fileChannel != null) { - try { - fileChannel.close(); - } catch (IOException ex) { - ex.printStackTrace(); // log debug - } - fileChannel = null; - } - } - } - - // (almost) regular file access - FileChannel and buffers. - private static class FileAccess extends DataAccess { - private FileChannel fileChannel; - private final long size; - private ByteBuffer buffer; - private long bufferStartInFile = 0; // offset of this.buffer in the file. - - public FileAccess(FileChannel fc, long channelSize, int bufferSizeHint, boolean useDirect) { - fileChannel = fc; - size = channelSize; - final int capacity = size < bufferSizeHint ? (int) size : bufferSizeHint; - buffer = useDirect ? ByteBuffer.allocateDirect(capacity) : ByteBuffer.allocate(capacity); - buffer.flip(); // or .limit(0) to indicate it's empty - } - - @Override - public boolean isEmpty() { - return bufferStartInFile + buffer.position() >= size; - } - - @Override - public void seek(long offset) throws IOException { - if (offset > size) { - throw new IllegalArgumentException(); - } - if (offset < bufferStartInFile + buffer.limit() && offset >= bufferStartInFile) { - buffer.position((int) (offset - bufferStartInFile)); - } else { - // out of current buffer, invalidate it (force re-read) - // XXX or ever re-read it right away? - bufferStartInFile = offset; - buffer.clear(); - buffer.limit(0); // or .flip() to indicate we switch to reading - fileChannel.position(offset); - } - } - - @Override - public void skip(int bytes) throws IOException { - final int newPos = buffer.position() + bytes; - if (newPos >= 0 && newPos < buffer.limit()) { - // no need to move file pointer, just rewind/seek buffer - buffer.position(newPos); - } else { - // - seek(bufferStartInFile + newPos); - } - } - - private boolean fill() throws IOException { - if (!buffer.hasRemaining()) { - bufferStartInFile += buffer.limit(); - buffer.clear(); - if (bufferStartInFile < size) { // just in case there'd be any exception on EOF, not -1 - fileChannel.read(buffer); - // may return -1 when EOF, but empty will reflect this, hence no explicit support here - } - buffer.flip(); - } - return buffer.hasRemaining(); - } - - @Override - public void readBytes(byte[] buf, int offset, int length) throws IOException { - if (!buffer.hasRemaining()) { - fill(); - } - while (length > 0) { - int tail = buffer.remaining(); - if (tail == 0) { - throw new IOException(); // shall not happen provided stream contains expected data and no attempts to read past isEmpty() == true are made. 
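This readBytes/fill pair is the usual pattern of draining a small staging buffer, refilling it from the channel, and repeating until the request is served. A compact standalone illustration of the same pattern (a hypothetical helper, not the library API; assumes a blocking channel):

	import java.io.IOException;
	import java.nio.ByteBuffer;
	import java.nio.channels.ReadableByteChannel;

	class RefillReadSketch {
		// fill dst completely from ch via a small staging buffer, or fail if the data ends early
		static void readFully(ReadableByteChannel ch, byte[] dst, int stagingSize) throws IOException {
			ByteBuffer buf = ByteBuffer.allocate(stagingSize);
			buf.flip(); // start out empty, same trick as the FileAccess constructor above
			int off = 0;
			while (off < dst.length) {
				if (!buf.hasRemaining()) {
					buf.clear();
					if (ch.read(buf) == -1) {
						throw new IOException("unexpected end of data");
					}
					buf.flip();
				}
				int chunk = Math.min(buf.remaining(), dst.length - off);
				buf.get(dst, off, chunk);
				off += chunk;
			}
		}
	}
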
- } - if (tail >= length) { - buffer.get(buf, offset, length); - } else { - buffer.get(buf, offset, tail); - fill(); - } - offset += tail; - length -= tail; - } - } - - @Override - public byte readByte() throws IOException { - if (buffer.hasRemaining()) { - return buffer.get(); - } - if (fill()) { - return buffer.get(); - } - throw new IOException(); - } - - @Override - public void done() { - if (buffer != null) { - buffer = null; - } - if (fileChannel != null) { - try { - fileChannel.close(); - } catch (IOException ex) { - ex.printStackTrace(); // log debug - } - fileChannel = null; - } - } - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/fs/FileWalker.java --- a/src/com/tmate/hgkit/fs/FileWalker.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,92 +0,0 @@ -/* - * Copyright (c) 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.fs; - -import java.io.File; -import java.util.LinkedList; -import java.util.NoSuchElementException; - -/** - * - * @author artem - */ -public class FileWalker { - - private final File startDir; - private final LinkedList dirQueue; - private final LinkedList fileQueue; - private File nextFile; - private String nextPath; - - // FilenameFilter is used in a non-standard way - first argument, dir, is always startDir, - // while second arg, name, is startDir-relative path to the file in question - public FileWalker(File startDir) { - this.startDir = startDir; - dirQueue = new LinkedList(); - fileQueue = new LinkedList(); - reset(); - } - - public void reset() { - fileQueue.clear(); - dirQueue.clear(); - dirQueue.add(startDir); - nextFile = null; - nextPath = null; - } - - public boolean hasNext() { - return fill(); - } - - public void next() { - if (!fill()) { - throw new NoSuchElementException(); - } - nextFile = fileQueue.removeFirst(); - nextPath = path(nextFile); - } - - public String name() { - return nextPath; - } - - public File file() { - return nextFile; - } - - private String path(File f) { - // XXX LocalHgRepo#normalize - String p = f.getPath().substring(startDir.getPath().length() + 1); - return p.replace('\\', '/').replace("//", "/"); - } - - private File[] listFiles(File f) { - // in case we need to solve os-related file issues (mac with some encodings?) - return f.listFiles(); - } - - // return true when fill added any elements to fileQueue. 
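fill() below walks the working directory breadth-first, queuing subdirectories (except .hg) and collecting plain files. An equivalent standalone sketch with a deque (illustrative, not the FileWalker API):

	import java.io.File;
	import java.util.ArrayDeque;
	import java.util.ArrayList;
	import java.util.Deque;
	import java.util.List;

	class DirWalkSketch {
		// collect every regular file under start, skipping any ".hg" directory
		static List<File> collectFiles(File start) {
			List<File> files = new ArrayList<File>();
			Deque<File> dirs = new ArrayDeque<File>();
			dirs.add(start);
			while (!dirs.isEmpty()) {
				File dir = dirs.removeFirst();
				File[] children = dir.listFiles();
				if (children == null) {
					continue; // not a directory, or an I/O error
				}
				for (File f : children) {
					if (f.isDirectory()) {
						if (!".hg".equals(f.getName())) {
							dirs.addLast(f);
						}
					} else {
						files.add(f);
					}
				}
			}
			return files;
		}
	}
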
- private boolean fill() { - while (fileQueue.isEmpty()) { - if (dirQueue.isEmpty()) { - return false; - } - while (!dirQueue.isEmpty()) { - File dir = dirQueue.removeFirst(); - for (File f : listFiles(dir)) { - if (f.isDirectory()) { - if (!".hg".equals(f.getName())) { - dirQueue.addLast(f); - } - } else { - fileQueue.addLast(f); - } - } - break; - } - } - return !fileQueue.isEmpty(); - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/fs/RepositoryLookup.java --- a/src/com/tmate/hgkit/fs/RepositoryLookup.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,119 +0,0 @@ -/** - * Copyright (c) 2010 Artem Tikhomirov - */ -package com.tmate.hgkit.fs; - -import java.io.File; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Iterator; -import java.util.LinkedHashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Set; - -import com.tmate.hgkit.ll.HgRepository; -import com.tmate.hgkit.ll.LocalHgRepo; - -/** - * @author artem - */ -public class RepositoryLookup { - - public HgRepository detect(Options opts) throws Exception { - if (opts.repoLocation != null) { - return detect(opts.repoLocation); - } - return detectFromWorkingDir(); - } - - public HgRepository detect(String[] commandLineArgs) throws Exception { - return detect(Options.parse(commandLineArgs)); - } - - public HgRepository detectFromWorkingDir() throws Exception { - return detect(System.getProperty("user.dir")); - } - - public HgRepository detect(String location) throws Exception /*FIXME Exception type, RepoInitException? */ { - File dir = new File(location); - File repository; - do { - repository = new File(dir, ".hg"); - if (repository.exists() && repository.isDirectory()) { - break; - } - repository = null; - dir = dir.getParentFile(); - - } while(dir != null); - if (repository == null) { - return new LocalHgRepo(location); - } - return new LocalHgRepo(repository); - } - - public static class Options { - - public String repoLocation; - public List files; - public int limit = -1; - public Set users; - public Set branches; - - public static Options parse(String[] commandLineArgs) { - Options rv = new Options(); - List args = Arrays.asList(commandLineArgs); - LinkedList files = new LinkedList(); - for (Iterator it = args.iterator(); it.hasNext(); ) { - String arg = it.next(); - if (arg.charAt(0) == '-') { - // option - if (arg.length() == 1) { - throw new IllegalArgumentException("Bad option: -"); - } - switch ((int) arg.charAt(1)) { - case (int) 'R' : { - if (! 
it.hasNext()) { - throw new IllegalArgumentException("Need repo location"); - } - rv.repoLocation = it.next(); - break; - } - case (int) 'l' : { - if (!it.hasNext()) { - throw new IllegalArgumentException(); - } - rv.limit = Integer.parseInt(it.next()); - break; - } - case (int) 'u' : { - if (rv.users == null) { - rv.users = new LinkedHashSet(); - } - rv.users.add(it.next()); - break; - } - case (int) 'b' : { - if (rv.branches == null) { - rv.branches = new LinkedHashSet(); - } - rv.branches.add(it.next()); - break; - } - } - } else { - // filename - files.add(arg); - } - } - if (!files.isEmpty()) { - rv.files = new ArrayList(files); - } else { - rv.files = Collections.emptyList(); - } - return rv; - } - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/fs/package.html --- a/src/com/tmate/hgkit/fs/package.html Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +0,0 @@ - - -File System operations - - \ No newline at end of file diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/Changelog.java --- a/src/com/tmate/hgkit/ll/Changelog.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -/** - * Representation of the Mercurial changelog file (list of ChangeSets) - * @author artem - */ -public class Changelog extends Revlog { - - /*package-local*/ Changelog(HgRepository hgRepo, RevlogStream content) { - super(hgRepo, content); - } - - public void all(final Changeset.Inspector inspector) { - range(0, content.revisionCount() - 1, inspector); - } - - public void range(int start, int end, final Changeset.Inspector inspector) { - Revlog.Inspector i = new Revlog.Inspector() { - - public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, byte[] data) { - Changeset cset = Changeset.parse(data, 0, data.length); - // XXX there's no guarantee for Changeset.Callback that distinct instance comes each time, consider instance reuse - inspector.next(revisionNumber, Nodeid.fromBinary(nodeid, 0), cset); - } - }; - content.iterate(start, end, true, i); - } - - public List range(int start, int end) { - final ArrayList rv = new ArrayList(end - start + 1); - Revlog.Inspector i = new Revlog.Inspector() { - - public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, byte[] data) { - Changeset cset = Changeset.parse(data, 0, data.length); - rv.add(cset); - } - }; - content.iterate(start, end, true, i); - return rv; - } - - public void range(final Changeset.Inspector inspector, final int... 
revisions) { - if (revisions == null || revisions.length == 0) { - return; - } - Revlog.Inspector i = new Revlog.Inspector() { - - public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, byte[] data) { - if (Arrays.binarySearch(revisions, revisionNumber) >= 0) { - Changeset cset = Changeset.parse(data, 0, data.length); - inspector.next(revisionNumber, Nodeid.fromBinary(nodeid, 0), cset); - } - } - }; - Arrays.sort(revisions); - content.iterate(revisions[0], revisions[revisions.length - 1], true, i); - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/Changeset.java --- a/src/com/tmate/hgkit/ll/Changeset.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,211 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import java.io.UnsupportedEncodingException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.Formatter; -import java.util.HashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; - -/** - * @see mercurial/changelog.py:read() - *
-        format used:
-        nodeid\n        : manifest node in ascii
-        user\n          : user, no \n or \r allowed
-        time tz extra\n : date (time is int or float, timezone is int)
-                        : extra is metadata, encoded and separated by '\0'
-                        : older versions ignore it
-        files\n\n       : files modified by the cset, no \n or \r allowed
-        (.*)            : comment (free text, ideally utf-8)
-
-        changelog v0 doesn't use extra
- * 
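 * For instance, a raw changelog entry following this layout might look like the sketch
 * below (nodeid, user, file name and comment are made up for illustration; the optional
 * extras, when present, follow the timezone and are separated from each other by '\0'):
 * <pre>
 *   0123456789abcdef0123456789abcdef01234567
 *   John Doe <john@example.com>
 *   1295835285 -3600 branch:stable
 *   src/Main.java
 *
 *   Fix NPE in Main
 * </pre>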
- * @author artem - */ -public class Changeset implements Cloneable /*for those that would like to keep a copy*/ { - // TODO immutable - private /*final*/ Nodeid manifest; - private String user; - private String comment; - private List files; // unmodifiable collection (otherwise #files() and implicit #clone() shall be revised) - private Date time; - private int timezone; // not sure it's of any use - private Map extras; - - private Changeset() { - } - - public Nodeid manifest() { - return manifest; - } - - public String user() { - return user; - } - - public String comment() { - return comment; - } - - public List files() { - return files; - } - - public Date date() { - return time; - } - - public String dateString() { - StringBuilder sb = new StringBuilder(30); - Formatter f = new Formatter(sb, Locale.US); - f.format("%ta % extras() { - return extras; - } - - public String branch() { - return extras.get("branch"); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("Changeset {"); - sb.append("User: ").append(user).append(", "); - sb.append("Comment: ").append(comment).append(", "); - sb.append("Manifest: ").append(manifest).append(", "); - sb.append("Date: ").append(time).append(", "); - sb.append("Files: ").append(files.size()); - for (String s : files) { - sb.append(", ").append(s); - } - if (extras != null) { - sb.append(", Extra: ").append(extras); - } - sb.append("}"); - return sb.toString(); - } - - @Override - public Changeset clone() { - try { - return (Changeset) super.clone(); - } catch (CloneNotSupportedException ex) { - throw new InternalError(ex.toString()); - } - } - - public static Changeset parse(byte[] data, int offset, int length) { - Changeset rv = new Changeset(); - rv.init(data, offset, length); - return rv; - } - - /*package-local*/ void init(byte[] data, int offset, int length) { - final int bufferEndIndex = offset + length; - final byte lineBreak = (byte) '\n'; - int breakIndex1 = indexOf(data, lineBreak, offset, bufferEndIndex); - if (breakIndex1 == -1) { - throw new IllegalArgumentException("Bad Changeset data"); - } - Nodeid _nodeid = Nodeid.fromAscii(data, 0, breakIndex1); - int breakIndex2 = indexOf(data, lineBreak, breakIndex1+1, bufferEndIndex); - if (breakIndex2 == -1) { - throw new IllegalArgumentException("Bad Changeset data"); - } - String _user = new String(data, breakIndex1+1, breakIndex2 - breakIndex1 - 1); - int breakIndex3 = indexOf(data, lineBreak, breakIndex2+1, bufferEndIndex); - if (breakIndex3 == -1) { - throw new IllegalArgumentException("Bad Changeset data"); - } - String _timeString = new String(data, breakIndex2+1, breakIndex3 - breakIndex2 - 1); - int space1 = _timeString.indexOf(' '); - if (space1 == -1) { - throw new IllegalArgumentException("Bad Changeset data"); - } - int space2 = _timeString.indexOf(' ', space1+1); - if (space2 == -1) { - space2 = _timeString.length(); - } - long unixTime = Long.parseLong(_timeString.substring(0, space1)); // XXX Float, perhaps - int _timezone = Integer.parseInt(_timeString.substring(space1+1, space2)); - // XXX not sure need to add timezone here - I can't figure out whether Hg keeps GMT time, and records timezone just for info, or unixTime is taken local - // on commit and timezone is recorded to adjust it to UTC. - Date _time = new Date(unixTime * 1000); - String _extras = space2 < _timeString.length() ? 
_timeString.substring(space2+1) : null; - Map _extrasMap; - if (_extras == null) { - _extrasMap = Collections.singletonMap("branch", "default"); - } else { - _extrasMap = new HashMap(); - for (String pair : _extras.split("\00")) { - int eq = pair.indexOf(':'); - // FIXME need to decode key/value, @see changelog.py:decodeextra - _extrasMap.put(pair.substring(0, eq), pair.substring(eq+1)); - } - if (!_extrasMap.containsKey("branch")) { - _extrasMap.put("branch", "default"); - } - _extrasMap = Collections.unmodifiableMap(_extrasMap); - } - - // - int lastStart = breakIndex3 + 1; - int breakIndex4 = indexOf(data, lineBreak, lastStart, bufferEndIndex); - ArrayList _files = new ArrayList(5); - while (breakIndex4 != -1 && breakIndex4 + 1 < bufferEndIndex) { - _files.add(new String(data, lastStart, breakIndex4 - lastStart)); - lastStart = breakIndex4 + 1; - if (data[breakIndex4 + 1] == lineBreak) { - // found \n\n - break; - } else { - breakIndex4 = indexOf(data, lineBreak, lastStart, bufferEndIndex); - } - } - if (breakIndex4 == -1 || breakIndex4 >= bufferEndIndex) { - throw new IllegalArgumentException("Bad Changeset data"); - } - String _comment; - try { - _comment = new String(data, breakIndex4+2, bufferEndIndex - breakIndex4 - 2, "UTF-8"); - } catch (UnsupportedEncodingException ex) { - _comment = ""; - throw new IllegalStateException("Could hardly happen"); - } - // change this instance at once, don't leave it partially changes in case of error - this.manifest = _nodeid; - this.user = _user; - this.time = _time; - this.timezone = _timezone; - this.files = Collections.unmodifiableList(_files); - this.comment = _comment; - this.extras = _extrasMap; - } - - private static int indexOf(byte[] src, byte what, int startOffset, int endIndex) { - for (int i = startOffset; i < endIndex; i++) { - if (src[i] == what) { - return i; - } - } - return -1; - } - - public interface Inspector { - // first(), last(), single(). - // - // TODO describe whether cset is new instance each time - void next(int revisionNumber, Nodeid nodeid, Changeset cset); - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/DigestHelper.java --- a/src/com/tmate/hgkit/ll/DigestHelper.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,97 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import java.io.IOException; -import java.io.InputStream; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; - -/** - *
- * DigestHelper dh;
- * dh.sha1(...).asHexString();
- *  or 
- * dh = dh.sha1(...);
- * nodeid.equalsTo(dh.asBinary());
- * </pre>
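 * A slightly more concrete sketch, tying this to revlog hashing, i.e.
 * sha1(min(p1,p2) ++ max(p1,p2) ++ text); parent1, parent2 and content are assumed locals:
 * <pre>
 * DigestHelper dh = new DigestHelper();
 * String hex = dh.sha1(parent1, parent2, content).asHexString();
 * boolean matches = nodeid.equalsTo(dh.sha1(parent1, parent2, content).asBinary());
 * </pre>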
- * @author artem - */ -public class DigestHelper { - private MessageDigest sha1; - private byte[] digest; - - public DigestHelper() { - } - - private MessageDigest getSHA1() { - if (sha1 == null) { - try { - sha1 = MessageDigest.getInstance("SHA-1"); - } catch (NoSuchAlgorithmException ex) { - // could hardly happen, JDK from Sun always has sha1. - ex.printStackTrace(); // FIXME log error - } - } - return sha1; - } - - - public DigestHelper sha1(Nodeid nodeid1, Nodeid nodeid2, byte[] data) { - return sha1(nodeid1.cloneData(), nodeid2.cloneData(), data); - } - - // sha1_digest(min(p1,p2) ++ max(p1,p2) ++ final_text) - public DigestHelper sha1(byte[] nodeidParent1, byte[] nodeidParent2, byte[] data) { - MessageDigest alg = getSHA1(); - if ((nodeidParent1[0] & 0x00FF) < (nodeidParent2[0] & 0x00FF)) { - alg.update(nodeidParent1); - alg.update(nodeidParent2); - } else { - alg.update(nodeidParent2); - alg.update(nodeidParent1); - } - digest = alg.digest(data); - assert digest.length == 20; - return this; - } - - public String asHexString() { - if (digest == null) { - throw new IllegalStateException("Shall init with sha1() call first"); - } - return toHexString(digest, 0, digest.length); - } - - // by reference, be careful not to modify (or #clone() if needed) - public byte[] asBinary() { - if (digest == null) { - throw new IllegalStateException("Shall init with sha1() call first"); - } - return digest; - } - - // XXX perhaps, digest functions should throw an exception, as it's caller responsibility to deal with eof, etc - public DigestHelper sha1(InputStream is /*ByteBuffer*/) throws IOException { - MessageDigest alg = getSHA1(); - byte[] buf = new byte[1024]; - int c; - while ((c = is.read(buf)) != -1) { - alg.update(buf, 0, c); - } - digest = alg.digest(); - return this; - } - - public static String toHexString(byte[] data, final int offset, final int count) { - char[] result = new char[count << 1]; - final String hexDigits = "0123456789abcdef"; - final int end = offset+count; - for (int i = offset, j = 0; i < end; i++) { - result[j++] = hexDigits.charAt((data[i] >>> 4) & 0x0F); - result[j++] = hexDigits.charAt(data[i] & 0x0F); - } - return new String(result); - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/HgBundle.java --- a/src/com/tmate/hgkit/ll/HgBundle.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,143 +0,0 @@ -/* - * Copyright (c) 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import java.io.File; -import java.io.IOException; -import java.util.LinkedList; -import java.util.List; - -import com.tmate.hgkit.fs.DataAccess; -import com.tmate.hgkit.fs.DataAccessProvider; - -/** - * @see http://mercurial.selenic.com/wiki/BundleFormat - * - * @author artem - */ -public class HgBundle { - - private final File bundleFile; - private final DataAccessProvider accessProvider; - - public HgBundle(DataAccessProvider dap, File bundle) { - accessProvider = dap; - bundleFile = bundle; - } - - public void changes(HgRepository hgRepo) throws IOException { - DataAccess da = accessProvider.create(bundleFile); - DigestHelper dh = new DigestHelper(); - try { - List changelogGroup = readGroup(da); - if (changelogGroup.isEmpty()) { - throw new IllegalStateException("No changelog group in the bundle"); // XXX perhaps, just be silent and/or log? - } - // XXX in fact, bundle not necessarily starts with the first revision missing in hgRepo - // need to 'scroll' till the last one common. 
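// One possible shape of that 'scroll', kept here only as a sketch (the code below still
// assumes the first group element is the first revision missing locally):
//   Iterator<GroupElement> it = changelogGroup.iterator();
//   GroupElement ge = it.next();
//   while (hgRepo.getChangelog().isKnown(ge.node()) && it.hasNext()) {
//       ge = it.next(); // skip entries the repository already has
//   }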
- final Nodeid base = changelogGroup.get(0).firstParent(); - if (!hgRepo.getChangelog().isKnown(base)) { - throw new IllegalArgumentException("unknown parent"); - } - // BundleFormat wiki says: - // Each Changelog entry patches the result of all previous patches - // (the previous, or parent patch of a given patch p is the patch that has a node equal to p's p1 field) - byte[] baseRevContent = hgRepo.getChangelog().content(base); - for (GroupElement ge : changelogGroup) { - byte[] csetContent = RevlogStream.apply(baseRevContent, -1, ge.patches); - dh = dh.sha1(ge.firstParent(), ge.secondParent(), csetContent); // XXX ge may give me access to byte[] content of nodeid directly, perhaps, I don't need DH to be friend of Nodeid? - if (!ge.node().equalsTo(dh.asBinary())) { - throw new IllegalStateException("Integrity check failed on " + bundleFile + ", node:" + ge.node()); - } - Changeset cs = Changeset.parse(csetContent, 0, csetContent.length); - System.out.println(cs.toString()); - baseRevContent = csetContent; - } - } finally { - da.done(); - } - } - - public void dump() throws IOException { - DataAccess da = accessProvider.create(bundleFile); - try { - LinkedList names = new LinkedList(); - if (!da.isEmpty()) { - System.out.println("Changelog group"); - List changelogGroup = readGroup(da); - for (GroupElement ge : changelogGroup) { - System.out.printf(" %s %s %s %s; patches:%d\n", ge.node(), ge.firstParent(), ge.secondParent(), ge.cset(), ge.patches.size()); - } - System.out.println("Manifest group"); - List manifestGroup = readGroup(da); - for (GroupElement ge : manifestGroup) { - System.out.printf(" %s %s %s %s; patches:%d\n", ge.node(), ge.firstParent(), ge.secondParent(), ge.cset(), ge.patches.size()); - } - while (!da.isEmpty()) { - int fnameLen = da.readInt(); - if (fnameLen <= 4) { - break; // null chunk, the last one. - } - byte[] fname = new byte[fnameLen - 4]; - da.readBytes(fname, 0, fname.length); - names.add(new String(fname)); - List fileGroup = readGroup(da); - System.out.println(names.getLast()); - for (GroupElement ge : fileGroup) { - System.out.printf(" %s %s %s %s; patches:%d\n", ge.node(), ge.firstParent(), ge.secondParent(), ge.cset(), ge.patches.size()); - } - } - } - System.out.println(names.size()); - for (String s : names) { - System.out.println(s); - } - } finally { - da.done(); - } - } - - private static List readGroup(DataAccess da) throws IOException { - int len = da.readInt(); - LinkedList rv = new LinkedList(); - while (len > 4 && !da.isEmpty()) { - byte[] nb = new byte[80]; - da.readBytes(nb, 0, 80); - int dataLength = len-84; - LinkedList patches = new LinkedList(); - while (dataLength > 0) { - RevlogStream.PatchRecord pr = RevlogStream.PatchRecord.read(da); - patches.add(pr); - dataLength -= pr.len + 12; - } - rv.add(new GroupElement(nb, patches)); - len = da.isEmpty() ? 
0 : da.readInt(); - } - return rv; - } - - static class GroupElement { - private byte[] header; // byte[80] takes 120 bytes, 4 Nodeids - 192 - private List patches; - - GroupElement(byte[] fourNodeids, List patchList) { - assert fourNodeids != null && fourNodeids.length == 80; - // patchList.size() > 0 - header = fourNodeids; - patches = patchList; - } - public Nodeid node() { - return Nodeid.fromBinary(header, 0); - } - public Nodeid firstParent() { - return Nodeid.fromBinary(header, 20); - } - public Nodeid secondParent() { - return Nodeid.fromBinary(header, 40); - } - public Nodeid cset() { // cs seems to be changeset - return Nodeid.fromBinary(header, 60); - } - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/HgDataFile.java --- a/src/com/tmate/hgkit/ll/HgDataFile.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,60 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import static com.tmate.hgkit.ll.HgRepository.TIP; - - -/** - * ? name:HgFileNode? - * @author artem - */ -public class HgDataFile extends Revlog { - - // absolute from repo root? - // slashes, unix-style? - // repo location agnostic, just to give info to user, not to access real storage - private final String path; - - /*package-local*/HgDataFile(HgRepository hgRepo, String path, RevlogStream content) { - super(hgRepo, content); - this.path = path; - } - - public boolean exists() { - return content != null; // XXX need better impl - } - - public String getPath() { - return path; // hgRepo.backresolve(this) -> name? - } - - public int length(Nodeid nodeid) { - return content.dataLength(getLocalRevisionNumber(nodeid)); - } - - public byte[] content() { - return content(TIP); - } - - public void history(Changeset.Inspector inspector) { - history(0, content.revisionCount() - 1, inspector); - } - - public void history(int start, int end, Changeset.Inspector inspector) { - if (!exists()) { - throw new IllegalStateException("Can't get history of invalid repository file node"); - } - final int[] commitRevisions = new int[end - start + 1]; - Revlog.Inspector insp = new Revlog.Inspector() { - int count = 0; - - public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, byte[] data) { - commitRevisions[count++] = linkRevision; - } - }; - content.iterate(start, end, false, insp); - getRepo().getChangelog().range(inspector, commitRevisions); - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/HgDirstate.java --- a/src/com/tmate/hgkit/ll/HgDirstate.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,163 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import java.io.File; -import java.io.IOException; -import java.util.Collections; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.TreeSet; - -import com.tmate.hgkit.fs.DataAccess; -import com.tmate.hgkit.fs.DataAccessProvider; - -/** - * @see http://mercurial.selenic.com/wiki/DirState - * @see http://mercurial.selenic.com/wiki/FileFormats#dirstate - * @author artem - */ -public class HgDirstate { - - private final DataAccessProvider accessProvider; - private final File dirstateFile; - private Map normal; - private Map added; - private Map removed; - private Map merged; - - /*package-local*/ HgDirstate() { - // empty instance - accessProvider = null; - dirstateFile = null; - } - - public 
HgDirstate(DataAccessProvider dap, File dirstate) { - accessProvider = dap; - dirstateFile = dirstate; - } - - private void read() { - normal = added = removed = merged = Collections.emptyMap(); - if (dirstateFile == null || !dirstateFile.exists()) { - return; - } - DataAccess da = accessProvider.create(dirstateFile); - if (da.isEmpty()) { - return; - } - // not sure linked is really needed here, just for ease of debug - normal = new LinkedHashMap(); - added = new LinkedHashMap(); - removed = new LinkedHashMap(); - merged = new LinkedHashMap(); - try { - // XXX skip(40) if we don't need these? - byte[] parents = new byte[40]; - da.readBytes(parents, 0, 40); - parents = null; - do { - final byte state = da.readByte(); - final int fmode = da.readInt(); - final int size = da.readInt(); - final int time = da.readInt(); - final int nameLen = da.readInt(); - String fn1 = null, fn2 = null; - byte[] name = new byte[nameLen]; - da.readBytes(name, 0, nameLen); - for (int i = 0; i < nameLen; i++) { - if (name[i] == 0) { - fn1 = new String(name, 0, i, "UTF-8"); // XXX unclear from documentation what encoding is used there - fn2 = new String(name, i+1, nameLen - i - 1, "UTF-8"); // need to check with different system codepages - break; - } - } - if (fn1 == null) { - fn1 = new String(name); - } - Record r = new Record(fmode, size, time, fn1, fn2); - if (state == 'n') { - normal.put(r.name1, r); - } else if (state == 'a') { - added.put(r.name1, r); - } else if (state == 'r') { - removed.put(r.name1, r); - } else if (state == 'm') { - merged.put(r.name1, r); - } else { - // FIXME log error? - } - } while (!da.isEmpty()); - } catch (IOException ex) { - ex.printStackTrace(); // FIXME log error, clean dirstate? - } finally { - da.done(); - } - } - - // new, modifiable collection - /*package-local*/ TreeSet all() { - read(); - TreeSet rv = new TreeSet(); - @SuppressWarnings("unchecked") - Map[] all = new Map[] { normal, added, removed, merged }; - for (int i = 0; i < all.length; i++) { - for (Record r : all[i].values()) { - rv.add(r.name1); - } - } - return rv; - } - - /*package-local*/ Record checkNormal(String fname) { - return normal.get(fname); - } - - /*package-local*/ Record checkAdded(String fname) { - return added.get(fname); - } - /*package-local*/ Record checkRemoved(String fname) { - return removed.get(fname); - } - /*package-local*/ Record checkMerged(String fname) { - return merged.get(fname); - } - - - - - public void dump() { - read(); - @SuppressWarnings("unchecked") - Map[] all = new Map[] { normal, added, removed, merged }; - char[] x = new char[] {'n', 'a', 'r', 'm' }; - for (int i = 0; i < all.length; i++) { - for (Record r : all[i].values()) { - System.out.printf("%c %3o%6d %30tc\t\t%s", x[i], r.mode, r.size, (long) r.time * 1000, r.name1); - if (r.name2 != null) { - System.out.printf(" --> %s", r.name2); - } - System.out.println(); - } - System.out.println(); - } - } - - /*package-local*/ static class Record { - final int mode; - final int size; - final int time; - final String name1; - final String name2; - - public Record(int fmode, int fsize, int ftime, String name1, String name2) { - mode = fmode; - size = fsize; - time = ftime; - this.name1 = name1; - this.name2 = name2; - - } - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/HgIgnore.java --- a/src/com/tmate/hgkit/ll/HgIgnore.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2010 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import 
java.io.BufferedReader; -import java.io.File; -import java.io.FileReader; -import java.io.IOException; -import java.util.Collections; -import java.util.Set; -import java.util.TreeSet; - -/** - * - * @author artem - */ -public class HgIgnore { - - private final LocalHgRepo repo; - private Set entries; - - public HgIgnore(LocalHgRepo localRepo) { - this.repo = localRepo; - } - - private void read() { - entries = Collections.emptySet(); - File hgignoreFile = new File(repo.getRepositoryRoot().getParentFile(), ".hgignore"); - if (!hgignoreFile.exists()) { - return; - } - entries = new TreeSet(); - try { - BufferedReader fr = new BufferedReader(new FileReader(hgignoreFile)); - String line; - while ((line = fr.readLine()) != null) { - // FIXME need to detect syntax:glob and other parameters - entries.add(line.trim()); // shall I account for local paths in the file (i.e. back-slashed on windows)? - } - } catch (IOException ex) { - ex.printStackTrace(); // log warn - } - } - - public void reset() { - // FIXME does anyone really need to clear HgIgnore? Perhaps, repo may return new instance each time, - // which is used throughout invocation and then discarded? - entries = null; - } - - public boolean isIgnored(String path) { - if (entries == null) { - read(); - } - if (entries.contains(path)) { - // easy part - return true; - } - // substrings are memory-friendly - int x = 0, i = path.indexOf('/', 0); - while (i != -1) { - if (entries.contains(path.substring(x, i))) { - return true; - } - // try one with ending slash - if (entries.contains(path.substring(x, i+1))) { // even if i is last index, i+1 is safe here - return true; - } - x = i+1; - i = path.indexOf('/', x); - } - return false; - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/HgManifest.java --- a/src/com/tmate/hgkit/ll/HgManifest.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -/** - * - * @author artem - */ -public class HgManifest extends Revlog { - - /*package-local*/ HgManifest(HgRepository hgRepo, RevlogStream content) { - super(hgRepo, content); - } - - public void walk(int start, int end, final Inspector inspector) { - Revlog.Inspector insp = new Revlog.Inspector() { - - private boolean gtg = true; // good to go - - public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, byte[] data) { - if (!gtg) { - return; - } - gtg = gtg && inspector.begin(revisionNumber, new Nodeid(nodeid, true)); - int i; - String fname = null; - String flags = null; - Nodeid nid = null; - for (i = 0; gtg && i < actualLen; i++) { - int x = i; - for( ; data[i] != '\n' && i < actualLen; i++) { - if (fname == null && data[i] == 0) { - fname = new String(data, x, i - x); - x = i+1; - } - } - if (i < actualLen) { - assert data[i] == '\n'; - int nodeidLen = i - x < 40 ? i-x : 40; - nid = Nodeid.fromAscii(data, x, nodeidLen); - if (nodeidLen + x < i) { - // 'x' and 'l' for executable bits and symlinks? 
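// For reference, one manifest entry as parsed here is laid out as follows
// (file name and revision are hypothetical):
//   "src/Main.java" + '\0' + 40 hex chars of the file revision nodeid + optional flags ("x" or "l") + '\n'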
- // hg --debug manifest shows 644 for each regular file in my repo - flags = new String(data, x + nodeidLen, i-x-nodeidLen); - } - gtg = gtg && inspector.next(nid, fname, flags); - } - nid = null; - fname = flags = null; - } - gtg = gtg && inspector.end(revisionNumber); - } - }; - content.iterate(start, end, true, insp); - } - - public interface Inspector { - boolean begin(int revision, Nodeid nid); - boolean next(Nodeid nid, String fname, String flags); - boolean end(int revision); - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/HgRepository.java --- a/src/com/tmate/hgkit/ll/HgRepository.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import org.tmatesoft.hg.core.Path; -import org.tmatesoft.hg.util.PathRewrite; - - -/** - * Shall be as state-less as possible, all the caching happens outside the repo, in commands/walkers - * @author artem - */ -public abstract class HgRepository { - - public static final int TIP = -1; - public static final int BAD_REVISION = Integer.MIN_VALUE; - public static final int WORKING_COPY = -2; - - // temp aux marker method - public static IllegalStateException notImplemented() { - return new IllegalStateException("Not implemented"); - } - - private Changelog changelog; - private HgManifest manifest; - private HgTags tags; - - private boolean isInvalid = true; - - public boolean isInvalid() { - return this.isInvalid; - } - - protected void setInvalid(boolean invalid) { - isInvalid = invalid; - } - - public final Changelog getChangelog() { - if (this.changelog == null) { - // might want delegate to protected createChangelog() some day - RevlogStream content = resolve(toStoragePath("00changelog.i", false)); // XXX perhaps, knowledge about filenames should be in LocalHgRepo? 
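// With the 'store' requirement present this resolves to .hg/store/00changelog.i
// (and 00manifest.i below to .hg/store/00manifest.i); without it the revlog sits
// directly under .hg/ -- see toStoragePath(path, false) in LocalHgRepo.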
- this.changelog = new Changelog(this, content); - } - return this.changelog; - } - - public final HgManifest getManifest() { - if (this.manifest == null) { - RevlogStream content = resolve(toStoragePath("00manifest.i", false)); - this.manifest = new HgManifest(this, content); - } - return this.manifest; - } - - public final HgTags getTags() { - if (tags == null) { - tags = createTags(); - } - return tags; - } - - protected abstract HgTags createTags(); - - public abstract HgDataFile getFileNode(String path); - public abstract HgDataFile getFileNode(Path path); - - public abstract String getLocation(); - - public abstract PathRewrite getPathHelper(); - - - protected abstract String toStoragePath(String path, boolean isData); - - /** - * Perhaps, should be separate interface, like ContentLookup - */ - protected abstract RevlogStream resolve(String repositoryPath); -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/HgTags.java --- a/src/com/tmate/hgkit/ll/HgTags.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,23 +0,0 @@ -/* - * Copyright (c) 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import java.util.Collections; -import java.util.List; - -/** - * FIXME Place-holder, implement - * @author artem - */ -public class HgTags { - - public List tags(Nodeid nid) { - return Collections.emptyList(); - } - - public boolean isTagged(Nodeid nid) { - // TODO implement - return false; - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/Internals.java --- a/src/com/tmate/hgkit/ll/Internals.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -/** - * DO NOT USE THIS CLASS, INTENDED FOR TESTING PURPOSES. - * - * Debug helper, to access otherwise restricted (package-local) methods - * - * @author artem - */ -public class Internals { - - private final HgRepository repo; - - public Internals(HgRepository hgRepo) { - this.repo = hgRepo; - } - - public void dumpDirstate() { - if (repo instanceof LocalHgRepo) { - ((LocalHgRepo) repo).loadDirstate().dump(); - } - } - - public boolean[] checkIgnored(String... 
toCheck) { - if (repo instanceof LocalHgRepo) { - HgIgnore ignore = ((LocalHgRepo) repo).loadIgnore(); - boolean[] rv = new boolean[toCheck.length]; - for (int i = 0; i < toCheck.length; i++) { - rv[i] = ignore.isIgnored(toCheck[i]); - } - return rv; - } - return new boolean[0]; - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/LocalHgRepo.java --- a/src/com/tmate/hgkit/ll/LocalHgRepo.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,255 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStreamReader; -import java.lang.ref.SoftReference; -import java.util.Arrays; -import java.util.HashMap; -import java.util.TreeSet; - -import org.tmatesoft.hg.core.Path; -import org.tmatesoft.hg.util.PathRewrite; - -import com.tmate.hgkit.fs.DataAccessProvider; -import com.tmate.hgkit.fs.FileWalker; - -/** - * @author artem - */ -public class LocalHgRepo extends HgRepository { - - private File repoDir; // .hg folder - private final String repoLocation; - private final DataAccessProvider dataAccess; - private final PathRewrite normalizePath = new PathRewrite() { - - public String rewrite(String path) { - return normalize(path); - } - }; - - public LocalHgRepo(String repositoryPath) { - setInvalid(true); - repoLocation = repositoryPath; - dataAccess = null; - } - - public LocalHgRepo(File repositoryRoot) throws IOException { - assert ".hg".equals(repositoryRoot.getName()) && repositoryRoot.isDirectory(); - setInvalid(false); - repoDir = repositoryRoot; - repoLocation = repositoryRoot.getParentFile().getCanonicalPath(); - dataAccess = new DataAccessProvider(); - parseRequires(); - } - - @Override - public String getLocation() { - return repoLocation; - } - - public FileWalker createWorkingDirWalker() { - return new FileWalker(repoDir.getParentFile()); - } - - // XXX package-local, unless there are cases when required from outside (guess, working dir/revision walkers may hide dirstate access and no public visibility needed) - final HgDirstate loadDirstate() { - return new HgDirstate(getDataAccess(), new File(repoDir, "dirstate")); - } - - // package-local, see comment for loadDirstate - public final HgIgnore loadIgnore() { - return new HgIgnore(this); - } - - /*package-local*/ DataAccessProvider getDataAccess() { - return dataAccess; - } - - /*package-local*/ File getRepositoryRoot() { - return repoDir; - } - - @Override - protected HgTags createTags() { - return new HgTags(); - } - - private final HashMap> streamsCache = new HashMap>(); - - /** - * path - repository storage path (i.e. one usually with .i or .d) - */ - @Override - protected RevlogStream resolve(String path) { - final SoftReference ref = streamsCache.get(path); - RevlogStream cached = ref == null ? null : ref.get(); - if (cached != null) { - return cached; - } - File f = new File(repoDir, path); - if (f.exists()) { - RevlogStream s = new RevlogStream(dataAccess, f); - streamsCache.put(path, new SoftReference(s)); - return s; - } - return null; - } - - @Override - public HgDataFile getFileNode(String path) { - String nPath = normalize(path); - String storagePath = toStoragePath(nPath, true); - RevlogStream content = resolve(storagePath); - // XXX no content when no file? or HgDataFile.exists() to detect that? How about files that were removed in previous releases? 
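// A sketch of what toStoragePath(nPath, true) produces for a hypothetical file,
// with the 'store' requirement set (uppercase letters are escaped as '_' + lowercase,
// see toStoragePath below):
//   "src/Main.java" -> "store/data/src/_main.java.i"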
- return new HgDataFile(this, nPath, content); - } - - @Override - public HgDataFile getFileNode(Path path) { - return getFileNode(path.toString()); - } - - @Override - public PathRewrite getPathHelper() { - return normalizePath; - } - - private boolean revlogv1; - private boolean store; - private boolean fncache; - private boolean dotencode; - - - private void parseRequires() { - File requiresFile = new File(repoDir, "requires"); - if (!requiresFile.exists()) { - return; - } - try { - BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(requiresFile))); - String line; - while ((line = br.readLine()) != null) { - revlogv1 |= "revlogv1".equals(line); - store |= "store".equals(line); - fncache |= "fncache".equals(line); - dotencode |= "dotencode".equals(line); - } - } catch (IOException ex) { - ex.printStackTrace(); // FIXME log - } - } - - // FIXME document what path argument is, whether it includes .i or .d, and whether it's 'normalized' (slashes) or not. - // since .hg/store keeps both .i files and files without extension (e.g. fncache), guees, for data == false - // we shall assume path has extension - // FIXME much more to be done, see store.py:_hybridencode - // @see http://mercurial.selenic.com/wiki/CaseFoldingPlan - @Override - protected String toStoragePath(String path, boolean data) { - path = normalize(path); - final String STR_STORE = "store/"; - final String STR_DATA = "data/"; - final String STR_DH = "dh/"; - if (!data) { - return this.store ? STR_STORE + path : path; - } - path = path.replace(".hg/", ".hg.hg/").replace(".i/", ".i.hg/").replace(".d/", ".d.hg/"); - StringBuilder sb = new StringBuilder(path.length() << 1); - if (store || fncache) { - // encodefilename - final String reservedChars = "\\:*?\"<>|"; - // in fact, \\ is unlikely to match, ever - we've replaced all of them already, above. Just regards to store.py - int x; - char[] hexByte = new char[2]; - for (int i = 0; i < path.length(); i++) { - final char ch = path.charAt(i); - if (ch >= 'a' && ch <= 'z') { - sb.append(ch); // POIRAE - } else if (ch >= 'A' && ch <= 'Z') { - sb.append('_'); - sb.append(Character.toLowerCase(ch)); // Perhaps, (char) (((int) ch) + 32)? Even better, |= 0x20? 
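// (Either variant relies on the ASCII layout: 'A' is 0x41 and 'a' is 0x61, so for
//  'A'..'Z' adding 32 and OR-ing with 0x20 give the same lowercase letter,
//  e.g. (char) ('Z' | 0x20) == 'z'.)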
- } else if ( (x = reservedChars.indexOf(ch)) != -1) { - sb.append('~'); - sb.append(toHexByte(reservedChars.charAt(x), hexByte)); - } else if ((ch >= '~' /*126*/ && ch <= 255) || ch < ' ' /*32*/) { - sb.append('~'); - sb.append(toHexByte(ch, hexByte)); - } else if (ch == '_') { - // note, encoding from store.py:_buildencodefun and :_build_lower_encodefun - // differ in the way they process '_' (latter doesn't escape it) - sb.append('_'); - sb.append('_'); - } else { - sb.append(ch); - } - } - // auxencode - if (fncache) { - x = 0; // last segment start - final TreeSet windowsReservedFilenames = new TreeSet(); - windowsReservedFilenames.addAll(Arrays.asList("con prn aux nul com1 com2 com3 com4 com5 com6 com7 com8 com9 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9".split(" "))); - do { - int i = sb.indexOf("/", x); - if (i == -1) { - i = sb.length(); - } - // windows reserved filenames are at least of length 3 - if (i - x >= 3) { - boolean found = false; - if (i-x == 3) { - found = windowsReservedFilenames.contains(sb.subSequence(x, i)); - } else if (sb.charAt(x+3) == '.') { // implicit i-x > 3 - found = windowsReservedFilenames.contains(sb.subSequence(x, x+3)); - } else if (i-x > 4 && sb.charAt(x+4) == '.') { - found = windowsReservedFilenames.contains(sb.subSequence(x, x+4)); - } - if (found) { - sb.setCharAt(x, '~'); - sb.insert(x+1, toHexByte(sb.charAt(x+2), hexByte)); - i += 2; - } - } - if (dotencode && (sb.charAt(x) == '.' || sb.charAt(x) == ' ')) { - sb.insert(x+1, toHexByte(sb.charAt(x), hexByte)); - sb.setCharAt(x, '~'); // setChar *after* charAt/insert to get ~2e, not ~7e for '.' - i += 2; - } - x = i+1; - } while (x < sb.length()); - } - } - final int MAX_PATH_LEN_IN_HGSTORE = 120; - if (fncache && (sb.length() + STR_DATA.length() > MAX_PATH_LEN_IN_HGSTORE)) { - throw HgRepository.notImplemented(); // FIXME digest and fncache use - } - if (this.store) { - sb.insert(0, STR_STORE + STR_DATA); - } - sb.append(".i"); - return sb.toString(); - } - - private static char[] toHexByte(int ch, char[] buf) { - assert buf.length > 1; - final String hexDigits = "0123456789abcdef"; - buf[0] = hexDigits.charAt((ch & 0x00F0) >>> 4); - buf[1] = hexDigits.charAt(ch & 0x0F); - return buf; - } - - // TODO handle . and .. (although unlikely to face them from GUI client) - private static String normalize(String path) { - path = path.replace('\\', '/').replace("//", "/"); - if (path.startsWith("/")) { - path = path.substring(1); - } - return path; - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/Nodeid.java --- a/src/com/tmate/hgkit/ll/Nodeid.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,123 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import static com.tmate.hgkit.ll.DigestHelper.toHexString; - -import java.util.Arrays; - - - -/** - * Whether to store fixed size array (20 bytes) - ease of manipulation (e.g. hashcode/equals), or - * memory effective - reuse supplied array, keep significant bits only? - * Fixed size array looks most appealing to me now - I doubt one can save any significant amount of memory. 
- * There'd always 20 non-zero bytes, the difference is only for any extra bytes one may pass to constructor - * @author artem - * - */ -public final class Nodeid { - - public static final Nodeid NULL = new Nodeid(new byte[20], false); - private final byte[] binaryData; - - /** - * @param binaryRepresentation - byte[20], kept by reference - * @param shallClone - true if array is subject to future modification and shall be copied, not referenced - */ - public Nodeid(byte[] binaryRepresentation, boolean shallClone) { - // 5 int fields => 32 bytes - // byte[20] => 48 bytes - if (binaryRepresentation == null || binaryRepresentation.length != 20) { - throw new IllegalArgumentException(); - } - this.binaryData = shallClone ? binaryRepresentation.clone() : binaryRepresentation; - } - - @Override - public int hashCode() { - // digest (part thereof) seems to be nice candidate for the hashCode - byte[] b = binaryData; - return b[0] << 24 | (b[1] & 0xFF) << 16 | (b[2] & 0xFF) << 8 | (b[3] & 0xFF); - } - - @Override - public boolean equals(Object o) { - if (o instanceof Nodeid) { - return Arrays.equals(this.binaryData, ((Nodeid) o).binaryData); - } - return false; - } - - public boolean equalsTo(byte[] buf) { - return Arrays.equals(this.binaryData, buf); - } - - @Override - public String toString() { - // XXX may want to output just single 0 for the NULL id? - return toHexString(binaryData, 0, binaryData.length); - } - - public String shortNotation() { - return toHexString(binaryData, 0, 6); - } - - public boolean isNull() { - if (this == NULL) { - return true; - } - for (int i = 0; i < 20; i++) { - if (this.binaryData[i] != 0) { - return false; - } - } - return true; - } - - // primary purpose is to give DigestHelper access to internal structure. Despite it's friends-only (package visibility), it's still makes sense to - // return a copy, to avoid any accidental modification (same reason field is not made visible, nor any callback, e.g. 
Output.write(byte[]) was introduced) - /*package-local*/byte[] cloneData() { - return binaryData.clone(); - } - - // primary difference with cons is handling of NULL id (this method returns constant) - // always makes a copy of an array passed - public static Nodeid fromBinary(byte[] binaryRepresentation, int offset) { - if (binaryRepresentation == null || binaryRepresentation.length - offset < 20) { - throw new IllegalArgumentException(); - } - int i = 0; - while (i < 20 && binaryRepresentation[offset+i] == 0) i++; - if (i == 20) { - return NULL; - } - if (offset == 0 && binaryRepresentation.length == 20) { - return new Nodeid(binaryRepresentation, true); - } - byte[] b = new byte[20]; // create new instance if no other reasonable guesses possible - System.arraycopy(binaryRepresentation, offset, b, 0, 20); - return new Nodeid(b, false); - } - - // binascii.unhexlify() - public static Nodeid fromAscii(byte[] asciiRepresentation, int offset, int length) { - if (length != 40) { - throw new IllegalArgumentException(); - } - byte[] data = new byte[20]; - boolean zeroBytes = true; - for (int i = 0, j = offset; i < data.length; i++) { - int hiNibble = Character.digit(asciiRepresentation[j++], 16); - int lowNibble = Character.digit(asciiRepresentation[j++], 16); - byte b = (byte) (((hiNibble << 4) | lowNibble) & 0xFF); - data[i] = b; - zeroBytes = zeroBytes && b == 0; - } - if (zeroBytes) { - return NULL; - } - return new Nodeid(data, false); - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/Revlog.java --- a/src/com/tmate/hgkit/ll/Revlog.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,242 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import static com.tmate.hgkit.ll.HgRepository.TIP; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedHashSet; -import java.util.Map; -import java.util.Set; - -/** - * - * @author artem - */ -public abstract class Revlog { - - private final HgRepository hgRepo; - protected final RevlogStream content; - - protected Revlog(HgRepository hgRepo, RevlogStream content) { - if (hgRepo == null) { - throw new NullPointerException(); - } - this.hgRepo = hgRepo; - this.content = content; - } - - public final HgRepository getRepo() { - return hgRepo; - } - - public int getRevisionCount() { - return content.revisionCount(); - } - - public int getLocalRevisionNumber(Nodeid nid) { - int revision = content.findLocalRevisionNumber(nid); - if (revision == Integer.MIN_VALUE) { - throw new IllegalArgumentException(String.format("%s doesn't represent a revision of %s", nid.toString(), this /*XXX HgDataFile.getPath might be more suitable here*/)); - } - return revision; - } - - // Till now, i follow approach that NULL nodeid is never part of revlog - public boolean isKnown(Nodeid nodeid) { - final int rn = content.findLocalRevisionNumber(nodeid); - if (Integer.MIN_VALUE == rn) { - return false; - } - if (rn < 0 || rn >= content.revisionCount()) { - // Sanity check - throw new IllegalStateException(); - } - return true; - } - - /** - * Access to revision data as is (decompressed, but otherwise unprocessed, i.e. not parsed for e.g. changeset or manifest entries) - * @param nodeid - */ - public byte[] content(Nodeid nodeid) { - return content(getLocalRevisionNumber(nodeid)); - } - - /** - * @param revision - repo-local index of this file change (not a changelog revision number!) 
- */ - public byte[] content(int revision) { - final byte[][] dataPtr = new byte[1][]; - Revlog.Inspector insp = new Revlog.Inspector() { - public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, byte[] data) { - dataPtr[0] = data; - } - }; - content.iterate(revision, revision, true, insp); - return dataPtr[0]; - } - - /** - * XXX perhaps, return value Nodeid[2] and boolean needNodeids is better (and higher level) API for this query? - * - * @param revision - revision to query parents, or {@link HgRepository#TIP} - * @param parentRevisions - int[2] to get local revision numbers of parents (e.g. {6, -1}) - * @param parent1 - byte[20] or null, if parent's nodeid is not needed - * @param parent2 - byte[20] or null, if second parent's nodeid is not needed - * @return - */ - public void parents(int revision, int[] parentRevisions, byte[] parent1, byte[] parent2) { - if (revision != TIP && !(revision >= 0 && revision < content.revisionCount())) { - throw new IllegalArgumentException(String.valueOf(revision)); - } - if (parentRevisions == null || parentRevisions.length < 2) { - throw new IllegalArgumentException(String.valueOf(parentRevisions)); - } - if (parent1 != null && parent1.length < 20) { - throw new IllegalArgumentException(parent1.toString()); - } - if (parent2 != null && parent2.length < 20) { - throw new IllegalArgumentException(parent2.toString()); - } - class ParentCollector implements Revlog.Inspector { - public int p1 = -1; - public int p2 = -1; - public byte[] nodeid; - - public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, byte[] data) { - p1 = parent1Revision; - p2 = parent2Revision; - this.nodeid = new byte[20]; - // nodeid arg now comes in 32 byte from (as in file format description), however upper 12 bytes are zeros. - System.arraycopy(nodeid, nodeid.length > 20 ? nodeid.length - 20 : 0, this.nodeid, 0, 20); - } - }; - ParentCollector pc = new ParentCollector(); - content.iterate(revision, revision, false, pc); - parentRevisions[0] = pc.p1; - parentRevisions[1] = pc.p2; - if (parent1 != null) { - if (parentRevisions[0] == -1) { - Arrays.fill(parent1, 0, 20, (byte) 0); - } else { - content.iterate(parentRevisions[0], parentRevisions[0], false, pc); - System.arraycopy(pc.nodeid, 0, parent1, 0, 20); - } - } - if (parent2 != null) { - if (parentRevisions[1] == -1) { - Arrays.fill(parent2, 0, 20, (byte) 0); - } else { - content.iterate(parentRevisions[1], parentRevisions[1], false, pc); - System.arraycopy(pc.nodeid, 0, parent2, 0, 20); - } - } - } - - // FIXME byte[] data might be too expensive, for few usecases it may be better to have intermediate Access object (when we don't need full data - // instantly - e.g. calculate hash, or comparing two revisions - // XXX seems that RevlogStream is better place for this class. - public interface Inspector { - // XXX boolean retVal to indicate whether to continue? - // TODO specify nodeid and data length, and reuse policy (i.e. 
if revlog stream doesn't reuse nodeid[] for each call) - void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[/*20*/] nodeid, byte[] data); - } - - /* - * XXX think over if it's better to do either: - * pw = getChangelog().new ParentWalker(); pw.init() and pass pw instance around as needed - * or - * add Revlog#getParentWalker(), static class, make cons() and #init package-local, and keep SoftReference to allow walker reuse. - * - * and yes, walker is not a proper name - */ - public final class ParentWalker { - private Map firstParent; - private Map secondParent; - private Set allNodes; - - public ParentWalker() { - firstParent = secondParent = Collections.emptyMap(); - allNodes = Collections.emptySet(); - } - - public void init() { - final RevlogStream stream = Revlog.this.content; - final int revisionCount = stream.revisionCount(); - firstParent = new HashMap(revisionCount); - secondParent = new HashMap(firstParent.size() >> 1); // assume branches/merges are less frequent - allNodes = new LinkedHashSet(); - - Inspector insp = new Inspector() { - final Nodeid[] sequentialRevisionNodeids = new Nodeid[revisionCount]; - int ix = 0; - public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, byte[] data) { - if (ix != revisionNumber) { - // XXX temp code, just to make sure I understand what's going on here - throw new IllegalStateException(); - } - if (parent1Revision >= revisionNumber || parent2Revision >= revisionNumber) { - throw new IllegalStateException(); // sanity, revisions are sequential - } - final Nodeid nid = new Nodeid(nodeid, true); - sequentialRevisionNodeids[ix++] = nid; - allNodes.add(nid); - if (parent1Revision != -1) { - firstParent.put(nid, sequentialRevisionNodeids[parent1Revision]); - if (parent2Revision != -1) { - secondParent.put(nid, sequentialRevisionNodeids[parent2Revision]); - } - } - } - }; - stream.iterate(0, -1, false, insp); - } - - public Set allNodes() { - return Collections.unmodifiableSet(allNodes); - } - - // FIXME need to decide whether Nodeid(00 * 20) is always known or not - public boolean knownNode(Nodeid nid) { - return allNodes.contains(nid); - } - - // null if none - public Nodeid firstParent(Nodeid nid) { - return firstParent.get(nid); - } - - // never null, Nodeid.NULL if none known - public Nodeid safeFirstParent(Nodeid nid) { - Nodeid rv = firstParent(nid); - return rv == null ? Nodeid.NULL : rv; - } - - public Nodeid secondParent(Nodeid nid) { - return secondParent.get(nid); - } - - public Nodeid safeSecondParent(Nodeid nid) { - Nodeid rv = secondParent(nid); - return rv == null ? 
Nodeid.NULL : rv; - } - - public boolean appendParentsOf(Nodeid nid, Collection c) { - Nodeid p1 = firstParent(nid); - boolean modified = false; - if (p1 != null) { - modified = c.add(p1); - Nodeid p2 = secondParent(nid); - if (p2 != null) { - modified = c.add(p2) || modified; - } - } - return modified; - } - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/RevlogIndexStreamAccess.java --- a/src/com/tmate/hgkit/ll/RevlogIndexStreamAccess.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,60 +0,0 @@ -/** - * Copyright (c) 2010 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import java.io.DataInput; -import java.io.IOException; - -/** - * @author artem - * - */ -public class RevlogIndexStreamAccess { - - private final RevlogStream stream; - - // takes RevlogStream. RevlogStream delegates calls for data to this accessor, which in turn refers back to RevlogStream to get - // correct [Input|Data]Stream according to revlog version (Revlogv0 or RevlogNG) - - public RevlogIndexStreamAccess(RevlogStream stream) { - this.stream = stream; - // TODO Auto-generated constructor stub - } - - - void readRevlogV0Record() throws IOException { - DataInput di = null; //FIXME stream.getIndexStream(); - int offset = di.readInt(); - int compressedLen = di.readInt(); - int baseRevision = di.readInt(); - int linkRevision = di.readInt(); -// int r = (((buf[0] & 0xff) << 24) | ((buf[1] & 0xff) << 16) | ((buf[2] & 0xff) << 8) | (buf[3] & 0xff)); - byte[] buf = new byte[20]; - di.readFully(buf, 0, 20); - Object nodeidOwn = buf.clone(); - // XXX nodeid as an Object with hash/equals? - di.readFully(buf, 0, 20); - Object nodeidParent1 = buf.clone(); - di.readFully(buf, 0, 20); - Object nodeidParent2 = buf.clone(); - } - - // another subclass? - void readRevlogNGRecord() throws IOException { - DataInput di = null; //FIXME stream.getIndexStream(); - long l = di.readLong(); - long offset = l >>> 16; - int flags = (int) (l & 0X0FFFF); - int compressedLen = di.readInt(); - int actualLen = di.readInt(); - int baseRevision = di.readInt(); - int linkRevision = di.readInt(); - int parent1Revision = di.readInt(); - int parent2Revision = di.readInt(); - byte[] buf = new byte[32]; - di.readFully(buf, 0, 20+12); - Object nodeid = buf/*[0..20]*/; - - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/RevlogIterator.java --- a/src/com/tmate/hgkit/ll/RevlogIterator.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,16 +0,0 @@ -/** - * Copyright (c) 2010 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -/** - * To walk against revlog - * XXX consider external iterator approach - * @author artem - */ -public class RevlogIterator { - - public RevlogIterator(RevlogStream stream) { - - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/RevlogStream.java --- a/src/com/tmate/hgkit/ll/RevlogStream.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,357 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import static com.tmate.hgkit.ll.HgRepository.TIP; - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.zip.DataFormatException; -import java.util.zip.Inflater; - -import com.tmate.hgkit.fs.DataAccess; -import com.tmate.hgkit.fs.DataAccessProvider; - -/** - * ? 
Single RevlogStream per file per repository with accessor to record access session (e.g. with back/forward operations), - * or numerous RevlogStream with separate representation of the underlaying data (cached, lazy ChunkStream)? - * @author artem - * @see http://mercurial.selenic.com/wiki/Revlog - * @see http://mercurial.selenic.com/wiki/RevlogNG - */ -public class RevlogStream { - - private List index; // indexed access highly needed - private boolean inline = false; - private final File indexFile; - private final DataAccessProvider dataAccess; - - // if we need anything else from HgRepo, might replace DAP parameter with HgRepo and query it for DAP. - RevlogStream(DataAccessProvider dap, File indexFile) { - this.dataAccess = dap; - this.indexFile = indexFile; - } - - /*package*/ DataAccess getIndexStream() { - return dataAccess.create(indexFile); - } - - /*package*/ DataAccess getDataStream() { - final String indexName = indexFile.getName(); - File dataFile = new File(indexFile.getParentFile(), indexName.substring(0, indexName.length() - 1) + "d"); - return dataAccess.create(dataFile); - } - - public int revisionCount() { - initOutline(); - return index.size(); - } - - public int dataLength(int revision) { - // XXX in fact, use of iterate() instead of this implementation may be quite reasonable. - // - final int indexSize = revisionCount(); - DataAccess daIndex = getIndexStream(); // XXX may supply a hint that I'll need really few bytes of data (although at some offset) - if (revision == TIP) { - revision = indexSize - 1; - } - try { - int recordOffset = inline ? (int) index.get(revision).offset : revision * REVLOGV1_RECORD_SIZE; - daIndex.seek(recordOffset + 12); // 6+2+4 - int actualLen = daIndex.readInt(); - return actualLen; - } catch (IOException ex) { - ex.printStackTrace(); // log error. FIXME better handling - throw new IllegalStateException(ex); - } finally { - daIndex.done(); - } - } - - // Perhaps, RevlogStream should be limited to use of plain int revisions for access, - // while Nodeids should be kept on the level up, in Revlog. Guess, Revlog better keep - // map of nodeids, and once this comes true, we may get rid of this method. - // Unlike its counterpart, Revlog#getLocalRevisionNumber, doesn't fail with exception if node not found, - // returns a predefined constant instead - /*package-local*/ int findLocalRevisionNumber(Nodeid nodeid) { - // XXX this one may be implemented with iterate() once there's mechanism to stop iterations - final int indexSize = revisionCount(); - DataAccess daIndex = getIndexStream(); - try { - byte[] nodeidBuf = new byte[20]; - for (int i = 0; i < indexSize; i++) { - daIndex.skip(8); - int compressedLen = daIndex.readInt(); - daIndex.skip(20); - daIndex.readBytes(nodeidBuf, 0, 20); - if (nodeid.equalsTo(nodeidBuf)) { - return i; - } - daIndex.skip(inline ? 12 + compressedLen : 12); - } - } catch (IOException ex) { - ex.printStackTrace(); // log error. FIXME better handling - throw new IllegalStateException(ex); - } finally { - daIndex.done(); - } - return Integer.MIN_VALUE; - } - - - private final int REVLOGV1_RECORD_SIZE = 64; - - // should be possible to use TIP, ALL, or -1, -2, -n notation of Hg - // ? 
boolean needsNodeid - public void iterate(int start, int end, boolean needData, Revlog.Inspector inspector) { - initOutline(); - final int indexSize = index.size(); - if (indexSize == 0) { - return; - } - if (end == TIP) { - end = indexSize - 1; - } - if (start == TIP) { - start = indexSize - 1; - } - if (start < 0 || start >= indexSize) { - throw new IllegalArgumentException("Bad left range boundary " + start); - } - if (end < start || end >= indexSize) { - throw new IllegalArgumentException("Bad right range boundary " + end); - } - // XXX may cache [start .. end] from index with a single read (pre-read) - - DataAccess daIndex = null, daData = null; - daIndex = getIndexStream(); - if (needData && !inline) { - daData = getDataStream(); - } - try { - byte[] nodeidBuf = new byte[20]; - byte[] lastData = null; - int i; - boolean extraReadsToBaseRev = false; - if (needData && index.get(start).baseRevision < start) { - i = index.get(start).baseRevision; - extraReadsToBaseRev = true; - } else { - i = start; - } - - daIndex.seek(inline ? (int) index.get(i).offset : i * REVLOGV1_RECORD_SIZE); - for (; i <= end; i++ ) { - long l = daIndex.readLong(); - @SuppressWarnings("unused") - long offset = l >>> 16; - @SuppressWarnings("unused") - int flags = (int) (l & 0X0FFFF); - int compressedLen = daIndex.readInt(); - int actualLen = daIndex.readInt(); - int baseRevision = daIndex.readInt(); - int linkRevision = daIndex.readInt(); - int parent1Revision = daIndex.readInt(); - int parent2Revision = daIndex.readInt(); - // Hg has 32 bytes here, uses 20 for nodeid, and keeps 12 last bytes empty - daIndex.readBytes(nodeidBuf, 0, 20); - daIndex.skip(12); - byte[] data = null; - if (needData) { - byte[] dataBuf = new byte[compressedLen]; - if (inline) { - daIndex.readBytes(dataBuf, 0, compressedLen); - } else { - daData.seek(index.get(i).offset); - daData.readBytes(dataBuf, 0, compressedLen); - } - if (dataBuf[0] == 0x78 /* 'x' */) { - try { - Inflater zlib = new Inflater(); // XXX Consider reuse of Inflater, and/or stream alternative - zlib.setInput(dataBuf, 0, compressedLen); - byte[] result = new byte[actualLen*2]; // FIXME need to use zlib.finished() instead - int resultLen = zlib.inflate(result); - zlib.end(); - data = new byte[resultLen]; - System.arraycopy(result, 0, data, 0, resultLen); - } catch (DataFormatException ex) { - ex.printStackTrace(); - data = new byte[0]; // FIXME need better failure strategy - } - } else if (dataBuf[0] == 0x75 /* 'u' */) { - data = new byte[dataBuf.length - 1]; - System.arraycopy(dataBuf, 1, data, 0, data.length); - } else { - // XXX Python impl in fact throws exception when there's not 'x', 'u' or '0' - // but I don't see reason not to return data as is - data = dataBuf; - } - // XXX - if (baseRevision != i) { // XXX not sure if this is the right way to detect a patch - // this is a patch - LinkedList patches = new LinkedList(); - int patchElementIndex = 0; - do { - PatchRecord pr = PatchRecord.read(data, patchElementIndex); - patches.add(pr); - patchElementIndex += 12 + pr.len; - } while (patchElementIndex < data.length); - // - byte[] baseRevContent = lastData; - data = apply(baseRevContent, actualLen, patches); - } - } else { - if (inline) { - daIndex.skip(compressedLen); - } - } - if (!extraReadsToBaseRev || i >= start) { - inspector.next(i, actualLen, baseRevision, linkRevision, parent1Revision, parent2Revision, nodeidBuf, data); - } - lastData = data; - } - } catch (IOException ex) { - throw new IllegalStateException(ex); // FIXME need better handling - } finally { - 
daIndex.done(); - if (daData != null) { - daData.done(); - } - } - } - - private void initOutline() { - if (index != null && !index.isEmpty()) { - return; - } - ArrayList res = new ArrayList(); - DataAccess da = getIndexStream(); - try { - int versionField = da.readInt(); - da.readInt(); // just to skip next 2 bytes of offset + flags - final int INLINEDATA = 1 << 16; - inline = (versionField & INLINEDATA) != 0; - long offset = 0; // first offset is always 0, thus Hg uses it for other purposes - while(true) { - int compressedLen = da.readInt(); - // 8+4 = 12 bytes total read here - @SuppressWarnings("unused") - int actualLen = da.readInt(); - int baseRevision = da.readInt(); - // 12 + 8 = 20 bytes read here -// int linkRevision = di.readInt(); -// int parent1Revision = di.readInt(); -// int parent2Revision = di.readInt(); -// byte[] nodeid = new byte[32]; - if (inline) { - res.add(new IndexEntry(offset + REVLOGV1_RECORD_SIZE * res.size(), baseRevision)); - da.skip(3*4 + 32 + compressedLen); // Check: 44 (skip) + 20 (read) = 64 (total RevlogNG record size) - } else { - res.add(new IndexEntry(offset, baseRevision)); - da.skip(3*4 + 32); - } - if (da.isEmpty()) { - // fine, done then - res.trimToSize(); - index = res; - break; - } else { - // start reading next record - long l = da.readLong(); - offset = l >>> 16; - } - } - } catch (IOException ex) { - ex.printStackTrace(); // log error - // too bad, no outline then. - index = Collections.emptyList(); - } finally { - da.done(); - } - - } - - - // perhaps, package-local or protected, if anyone else from low-level needs them - // XXX think over if we should keep offset in case of separate data file - we read the field anyway. Perhaps, distinct entry classes for Inline and non-inline indexes? - private static class IndexEntry { - public final long offset; // for separate .i and .d - copy of index record entry, for inline index - actual offset of the record in the .i file (record entry + revision * record size)) - //public final int length; // data past fixed record (need to decide whether including header size or not), and whether length is of compressed data or not - public final int baseRevision; - - public IndexEntry(long o, int baseRev) { - offset = o; - baseRevision = baseRev; - } - } - - // mpatch.c : apply() - // FIXME need to implement patch merge (fold, combine, gather and discard from aforementioned mpatch.[c|py]), also see Revlog and Mercurial PDF - /*package-local for HgBundle; until moved to better place*/static byte[] apply(byte[] baseRevisionContent, int outcomeLen, List patch) { - int last = 0, destIndex = 0; - if (outcomeLen == -1) { - outcomeLen = baseRevisionContent.length; - for (PatchRecord pr : patch) { - outcomeLen += pr.start - last + pr.len; - last = pr.end; - } - outcomeLen -= last; - last = 0; - } - byte[] rv = new byte[outcomeLen]; - for (PatchRecord pr : patch) { - System.arraycopy(baseRevisionContent, last, rv, destIndex, pr.start-last); - destIndex += pr.start - last; - System.arraycopy(pr.data, 0, rv, destIndex, pr.data.length); - destIndex += pr.data.length; - last = pr.end; - } - System.arraycopy(baseRevisionContent, last, rv, destIndex, baseRevisionContent.length - last); - return rv; - } - - // @see http://mercurial.selenic.com/wiki/BundleFormat, in Changelog group description - /*package-local*/ static class PatchRecord { // copy of struct frag from mpatch.c - /* - Given there are pr1 and pr2: - pr1.start to pr1.end will be replaced with pr's data (of pr1.len) - pr1.end to pr2.start gets copied from base - */ 
- int start, end, len; - byte[] data; - - // TODO consider PatchRecord that only records data position (absolute in data source), and acquires data as needed - private PatchRecord(int p1, int p2, int length, byte[] src) { - start = p1; - end = p2; - len = length; - data = src; - } - - /*package-local*/ static PatchRecord read(byte[] data, int offset) { - final int x = offset; // shorthand - int p1 = ((data[x] & 0xFF)<< 24) | ((data[x+1] & 0xFF) << 16) | ((data[x+2] & 0xFF) << 8) | (data[x+3] & 0xFF); - int p2 = ((data[x+4] & 0xFF) << 24) | ((data[x+5] & 0xFF) << 16) | ((data[x+6] & 0xFF) << 8) | (data[x+7] & 0xFF); - int len = ((data[x+8] & 0xFF) << 24) | ((data[x+9] & 0xFF) << 16) | ((data[x+10] & 0xFF) << 8) | (data[x+11] & 0xFF); - byte[] dataCopy = new byte[len]; - System.arraycopy(data, x+12, dataCopy, 0, len); - return new PatchRecord(p1, p2, len, dataCopy); - } - - /*package-local*/ static PatchRecord read(DataAccess da) throws IOException { - int p1 = da.readInt(); - int p2 = da.readInt(); - int len = da.readInt(); - byte[] src = new byte[len]; - da.readBytes(src, 0, len); - return new PatchRecord(p1, p2, len, src); - } - - - } -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/StatusCollector.java --- a/src/com/tmate/hgkit/ll/StatusCollector.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,331 +0,0 @@ -/* - * Copyright (c) 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import static com.tmate.hgkit.ll.HgRepository.BAD_REVISION; -import static com.tmate.hgkit.ll.HgRepository.TIP; - -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.TreeSet; - -/** - * RevisionWalker? - * @author artem - */ -public class StatusCollector { - - private final HgRepository repo; - private final Map cache; // sparse array, in fact - - public StatusCollector(HgRepository hgRepo) { - this.repo = hgRepo; - cache = new HashMap(); - ManifestRevisionInspector emptyFakeState = new ManifestRevisionInspector(-1, -1); - emptyFakeState.begin(-1, null); - emptyFakeState.end(-1); // FIXME HgRepo.TIP == -1 as well, need to distinguish fake "prior to first" revision from "the very last" - cache.put(-1, emptyFakeState); - } - - public HgRepository getRepo() { - return repo; - } - - private ManifestRevisionInspector get(int rev) { - ManifestRevisionInspector i = cache.get(rev); - if (i == null) { - i = new ManifestRevisionInspector(rev, rev); - cache.put(rev, i); - repo.getManifest().walk(rev, rev, i); - } - return i; - } - - /*package-local*/ ManifestRevisionInspector raw(int rev) { - return get(rev); - } - - // hg status --change - public void change(int rev, Inspector inspector) { - int[] parents = new int[2]; - repo.getChangelog().parents(rev, parents, null, null); - walk(parents[0], rev, inspector); - } - - // I assume revision numbers are the same for changelog and manifest - here - // user would like to pass changelog revision numbers, and I use them directly to walk manifest. - // if this assumption is wrong, fix this (lookup manifest revisions from changeset). 
- public void walk(int rev1, int rev2, Inspector inspector) { - if (rev1 == rev2) { - throw new IllegalArgumentException(); - } - if (inspector == null) { - throw new IllegalArgumentException(); - } - if (inspector instanceof Record) { - ((Record) inspector).init(rev1, rev2, this); - } - if (rev1 == TIP) { - rev1 = repo.getManifest().getRevisionCount() - 1; - } - if (rev2 == TIP) { - rev2 = repo.getManifest().getRevisionCount() - 1; // XXX add Revlog.tip() func ? - } - // in fact, rev1 and rev2 are often next (or close) to each other, - // thus, we can optimize Manifest reads here (manifest.walk(rev1, rev2)) - ManifestRevisionInspector r1, r2; - if (!cache.containsKey(rev1) && !cache.containsKey(rev2) && Math.abs(rev1 - rev2) < 5 /*subjective equivalent of 'close enough'*/) { - int minRev = rev1 < rev2 ? rev1 : rev2; - int maxRev = minRev == rev1 ? rev2 : rev1; - r1 = r2 = new ManifestRevisionInspector(minRev, maxRev); - for (int i = minRev; i <= maxRev; i++) { - cache.put(i, r1); - } - repo.getManifest().walk(minRev, maxRev, r1); - } else { - r1 = get(rev1); - r2 = get(rev2); - } - - TreeSet r1Files = new TreeSet(r1.files(rev1)); - for (String fname : r2.files(rev2)) { - if (r1Files.remove(fname)) { - Nodeid nidR1 = r1.nodeid(rev1, fname); - Nodeid nidR2 = r2.nodeid(rev2, fname); - String flagsR1 = r1.flags(rev1, fname); - String flagsR2 = r2.flags(rev2, fname); - if (nidR1.equals(nidR2) && ((flagsR2 == null && flagsR1 == null) || flagsR2.equals(flagsR1))) { - inspector.clean(fname); - } else { - inspector.modified(fname); - } - } else { - inspector.added(fname); - } - } - for (String left : r1Files) { - inspector.removed(left); - } - // inspector.done() if useful e.g. in UI client - } - - public Record status(int rev1, int rev2) { - Record rv = new Record(); - walk(rev1, rev2, rv); - return rv; - } - - public interface Inspector { - void modified(String fname); - void added(String fname); - void copied(String fnameOrigin, String fnameAdded); // if copied files of no interest, should delegate to self.added(fnameAdded); - void removed(String fname); - void clean(String fname); - void missing(String fname); // aka deleted (tracked by Hg, but not available in FS any more - void unknown(String fname); // not tracked - void ignored(String fname); - } - - // XXX for r1..r2 status, only modified, added, removed (and perhaps, clean) make sense - public static class Record implements Inspector { - private List modified, added, removed, clean, missing, unknown, ignored; - private Map copied; - - private int startRev, endRev; - private StatusCollector statusHelper; - - // XXX StatusCollector may additionally initialize Record instance to speed lookup of changed file revisions - // here I need access to ManifestRevisionInspector via #raw(). Perhaps, non-static class (to get - // implicit reference to StatusCollector) may be better? 
- // Since users may want to reuse Record instance we've once created (and initialized), we need to - // ensure functionality is correct for each/any call (#walk checks instanceof Record and fixes it up) - // Perhaps, distinct helper (sc.getRevisionHelper().nodeid(fname)) would be better, just not clear - // how to supply [start..end] values there easily - /*package-local*/void init(int startRevision, int endRevision, StatusCollector self) { - startRev = startRevision; - endRev = endRevision; - statusHelper = self; - } - - public Nodeid nodeidBeforeChange(String fname) { - if (statusHelper == null || startRev == BAD_REVISION) { - return null; - } - if ((modified == null || !modified.contains(fname)) && (removed == null || !removed.contains(fname))) { - return null; - } - return statusHelper.raw(startRev).nodeid(startRev, fname); - } - public Nodeid nodeidAfterChange(String fname) { - if (statusHelper == null || endRev == BAD_REVISION) { - return null; - } - if ((modified == null || !modified.contains(fname)) && (added == null || !added.contains(fname))) { - return null; - } - return statusHelper.raw(endRev).nodeid(endRev, fname); - } - - public List getModified() { - return proper(modified); - } - - public List getAdded() { - return proper(added); - } - - public List getRemoved() { - return proper(removed); - } - - public Map getCopied() { - if (copied == null) { - return Collections.emptyMap(); - } - return Collections.unmodifiableMap(copied); - } - - public List getClean() { - return proper(clean); - } - - public List getMissing() { - return proper(missing); - } - - public List getUnknown() { - return proper(unknown); - } - - public List getIgnored() { - return proper(ignored); - } - - private List proper(List l) { - if (l == null) { - return Collections.emptyList(); - } - return Collections.unmodifiableList(l); - } - - // - // - - public void modified(String fname) { - modified = doAdd(modified, fname); - } - - public void added(String fname) { - added = doAdd(added, fname); - } - - public void copied(String fnameOrigin, String fnameAdded) { - if (copied == null) { - copied = new LinkedHashMap(); - } - copied.put(fnameOrigin, fnameAdded); - } - - public void removed(String fname) { - removed = doAdd(removed, fname); - } - - public void clean(String fname) { - clean = doAdd(clean, fname); - } - - public void missing(String fname) { - missing = doAdd(missing, fname); - } - - public void unknown(String fname) { - unknown = doAdd(unknown, fname); - } - - public void ignored(String fname) { - ignored = doAdd(ignored, fname); - } - - private static List doAdd(List l, String s) { - if (l == null) { - l = new LinkedList(); - } - l.add(s); - return l; - } - } - - // XXX in fact, indexed access brings more trouble than benefits, get rid of it? Distinct instance per revision is good enough - public /*XXX private, actually. 
Made public unless repo.statusLocal finds better place*/ static final class ManifestRevisionInspector implements HgManifest.Inspector { - private final HashMap[] idsMap; - private final HashMap[] flagsMap; - private final int baseRevision; - private int r = -1; // cursor - - /** - * [minRev, maxRev] - * [-1,-1] also accepted (for fake empty instance) - * @param minRev - inclusive - * @param maxRev - inclusive - */ - @SuppressWarnings("unchecked") - public ManifestRevisionInspector(int minRev, int maxRev) { - baseRevision = minRev; - int range = maxRev - minRev + 1; - idsMap = new HashMap[range]; - flagsMap = new HashMap[range]; - } - - public Collection files(int rev) { - if (rev < baseRevision || rev >= baseRevision + idsMap.length) { - throw new IllegalArgumentException(); - } - return idsMap[rev - baseRevision].keySet(); - } - - public Nodeid nodeid(int rev, String fname) { - if (rev < baseRevision || rev >= baseRevision + idsMap.length) { - throw new IllegalArgumentException(); - } - return idsMap[rev - baseRevision].get(fname); - } - - public String flags(int rev, String fname) { - if (rev < baseRevision || rev >= baseRevision + idsMap.length) { - throw new IllegalArgumentException(); - } - return flagsMap[rev - baseRevision].get(fname); - } - - // - - public boolean next(Nodeid nid, String fname, String flags) { - idsMap[r].put(fname, nid); - flagsMap[r].put(fname, flags); - return true; - } - - public boolean end(int revision) { - assert revision == r + baseRevision; - r = -1; - return revision+1 < baseRevision + idsMap.length; - } - - public boolean begin(int revision, Nodeid nid) { - if (revision < baseRevision || revision >= baseRevision + idsMap.length) { - throw new IllegalArgumentException(); - } - r = revision - baseRevision; - idsMap[r] = new HashMap(); - flagsMap[r] = new HashMap(); - return true; - } - } - -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/WorkingCopyStatusCollector.java --- a/src/com/tmate/hgkit/ll/WorkingCopyStatusCollector.java Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,214 +0,0 @@ -/* - * Copyright (c) 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import static com.tmate.hgkit.ll.HgRepository.BAD_REVISION; -import static com.tmate.hgkit.ll.HgRepository.TIP; - -import java.io.BufferedInputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.util.Collections; -import java.util.Set; -import java.util.TreeSet; - -import com.tmate.hgkit.fs.FileWalker; - -/** - * - * @author artem - */ -public class WorkingCopyStatusCollector { - - private final HgRepository repo; - private final FileWalker repoWalker; - private HgDirstate dirstate; - private StatusCollector baseRevisionCollector; - - public WorkingCopyStatusCollector(HgRepository hgRepo, FileWalker hgRepoWalker) { - this.repo = hgRepo; - this.repoWalker = hgRepoWalker; - } - - /** - * Optionally, supply a collector instance that may cache (or have already cached) base revision - * @param sc may be null - */ - public void setBaseRevisionCollector(StatusCollector sc) { - baseRevisionCollector = sc; - } - - private HgDirstate getDirstate() { - if (dirstate == null) { - if (repo instanceof LocalHgRepo) { - dirstate = ((LocalHgRepo) repo).loadDirstate(); - } else { - dirstate = new HgDirstate(); - } - } - return dirstate; - } - - // may be invoked few times - public void walk(int baseRevision, StatusCollector.Inspector inspector) { - final HgIgnore hgIgnore = ((LocalHgRepo) repo).loadIgnore(); // 
FIXME hack - TreeSet knownEntries = getDirstate().all(); - final boolean isTipBase; - if (baseRevision == TIP) { - baseRevision = repo.getManifest().getRevisionCount() - 1; - isTipBase = true; - } else { - isTipBase = baseRevision == repo.getManifest().getRevisionCount() - 1; - } - StatusCollector.ManifestRevisionInspector collect = null; - Set baseRevFiles = Collections.emptySet(); - if (!isTipBase) { - if (baseRevisionCollector != null) { - collect = baseRevisionCollector.raw(baseRevision); - } else { - collect = new StatusCollector.ManifestRevisionInspector(baseRevision, baseRevision); - repo.getManifest().walk(baseRevision, baseRevision, collect); - } - baseRevFiles = new TreeSet(collect.files(baseRevision)); - } - if (inspector instanceof StatusCollector.Record) { - StatusCollector sc = baseRevisionCollector == null ? new StatusCollector(repo) : baseRevisionCollector; - ((StatusCollector.Record) inspector).init(baseRevision, BAD_REVISION, sc); - } - repoWalker.reset(); - while (repoWalker.hasNext()) { - repoWalker.next(); - String fname = repoWalker.name(); - File f = repoWalker.file(); - if (hgIgnore.isIgnored(fname)) { - inspector.ignored(fname); - } else if (knownEntries.remove(fname)) { - // modified, added, removed, clean - if (collect != null) { // need to check against base revision, not FS file - Nodeid nid1 = collect.nodeid(baseRevision, fname); - String flags = collect.flags(baseRevision, fname); - checkLocalStatusAgainstBaseRevision(baseRevFiles, nid1, flags, fname, f, inspector); - baseRevFiles.remove(fname); - } else { - checkLocalStatusAgainstFile(fname, f, inspector); - } - } else { - inspector.unknown(fname); - } - } - if (collect != null) { - for (String r : baseRevFiles) { - inspector.removed(r); - } - } - for (String m : knownEntries) { - // removed from the repository and missing from working dir shall not be reported as 'deleted' - if (getDirstate().checkRemoved(m) == null) { - inspector.missing(m); - } - } - } - - public StatusCollector.Record status(int baseRevision) { - StatusCollector.Record rv = new StatusCollector.Record(); - walk(baseRevision, rv); - return rv; - } - - //******************************************** - - - private void checkLocalStatusAgainstFile(String fname, File f, StatusCollector.Inspector inspector) { - HgDirstate.Record r; - if ((r = getDirstate().checkNormal(fname)) != null) { - // either clean or modified - if (f.lastModified() / 1000 == r.time && r.size == f.length()) { - inspector.clean(fname); - } else { - // FIXME check actual content to avoid false modified files - inspector.modified(fname); - } - } else if ((r = getDirstate().checkAdded(fname)) != null) { - if (r.name2 == null) { - inspector.added(fname); - } else { - inspector.copied(fname, r.name2); - } - } else if ((r = getDirstate().checkRemoved(fname)) != null) { - inspector.removed(fname); - } else if ((r = getDirstate().checkMerged(fname)) != null) { - inspector.modified(fname); - } - } - - // XXX refactor checkLocalStatus methods in more OO way - private void checkLocalStatusAgainstBaseRevision(Set baseRevNames, Nodeid nid1, String flags, String fname, File f, StatusCollector.Inspector inspector) { - // fname is in the dirstate, either Normal, Added, Removed or Merged - HgDirstate.Record r; - if (nid1 == null) { - // normal: added? - // added: not known at the time of baseRevision, shall report - // merged: was not known, report as added? 
- if ((r = getDirstate().checkAdded(fname)) != null) { - if (r.name2 != null && baseRevNames.contains(r.name2)) { - baseRevNames.remove(r.name2); - inspector.copied(r.name2, fname); - return; - } - // fall-through, report as added - } else if (getDirstate().checkRemoved(fname) != null) { - // removed: removed file was not known at the time of baseRevision, and we should not report it as removed - return; - } - inspector.added(fname); - } else { - // was known; check whether clean or modified - // when added - seems to be the case of a file added once again, hence need to check if content is different - if ((r = getDirstate().checkNormal(fname)) != null || (r = getDirstate().checkMerged(fname)) != null || (r = getDirstate().checkAdded(fname)) != null) { - // either clean or modified - HgDataFile fileNode = repo.getFileNode(fname); - final int lengthAtRevision = fileNode.length(nid1); - if (r.size /* XXX File.length() ?! */ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) { - inspector.modified(fname); - } else { - // check actual content to see actual changes - // XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison - if (areTheSame(f, fileNode.content(nid1))) { - inspector.clean(fname); - } else { - inspector.modified(fname); - } - } - } - // only those left in idsMap after processing are reported as removed - } - - // TODO think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest - // we don't need to tell exact difference, hash should be enough to detect difference, and it doesn't involve reading historical file content, and it's relatively - // cheap to calc hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the next approach is used for nodeids: - // changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest - // then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids). - // The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean' - } - - private static String todoGenerateFlags(String fname) { - // FIXME implement - return null; - } - private static boolean areTheSame(File f, byte[] data) { - try { - BufferedInputStream is = new BufferedInputStream(new FileInputStream(f)); - int i = 0; - while (i < data.length && data[i] == is.read()) { - i++; // increment only for successful match, otherwise won't tell last byte in data was the same as read from the stream - } - return i == data.length && is.read() == -1; // although data length is expected to be the same (see caller), check that we reached EOF, no more data left. 
- } catch (IOException ex) { - ex.printStackTrace(); // log warn - } - return false; - } - -} diff -r 0d279bcc4442 -r 6f1b88693d48 src/com/tmate/hgkit/ll/package.html --- a/src/com/tmate/hgkit/ll/package.html Sun Jan 23 04:06:18 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +0,0 @@ - - -Low-level API operations - - \ No newline at end of file diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/core/Cset.java --- a/src/org/tmatesoft/hg/core/Cset.java Sun Jan 23 04:06:18 2011 +0100 +++ b/src/org/tmatesoft/hg/core/Cset.java Mon Jan 24 03:14:45 2011 +0100 @@ -21,12 +21,11 @@ import java.util.List; import org.tmatesoft.hg.core.LogCommand.FileRevision; +import org.tmatesoft.hg.repo.Changeset; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.repo.StatusCollector; import org.tmatesoft.hg.util.PathPool; -import com.tmate.hgkit.ll.Changeset; -import com.tmate.hgkit.ll.HgRepository; -import com.tmate.hgkit.ll.Nodeid; -import com.tmate.hgkit.ll.StatusCollector; /** * TODO rename to Changeset along with original Changeset moved to .repo and renamed to HgChangeset? diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/core/LogCommand.java --- a/src/org/tmatesoft/hg/core/LogCommand.java Sun Jan 23 04:06:18 2011 +0100 +++ b/src/org/tmatesoft/hg/core/LogCommand.java Mon Jan 24 03:14:45 2011 +0100 @@ -16,7 +16,7 @@ */ package org.tmatesoft.hg.core; -import static com.tmate.hgkit.ll.HgRepository.TIP; +import static org.tmatesoft.hg.repo.HgRepository.TIP; import java.util.Calendar; import java.util.Collections; @@ -26,12 +26,11 @@ import java.util.Set; import java.util.TreeSet; +import org.tmatesoft.hg.repo.Changeset; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.repo.StatusCollector; import org.tmatesoft.hg.util.PathPool; -import com.tmate.hgkit.ll.Changeset; -import com.tmate.hgkit.ll.HgRepository; -import com.tmate.hgkit.ll.Nodeid; -import com.tmate.hgkit.ll.StatusCollector; /** *
@@ -137,7 +136,7 @@
 	}
 
 	/**
-	 * Similar to {@link #execute(com.tmate.hgkit.ll.Changeset.Inspector)}, collects and return result as a list.
+	 * Similar to {@link #execute(org.tmatesoft.hg.repo.Changeset.Inspector)}, collects and returns the result as a list.
 	 */
 	public List execute() {
 		CollectHandler collector = new CollectHandler();
diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/core/Nodeid.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/core/Nodeid.java	Mon Jan 24 03:14:45 2011 +0100
@@ -0,0 +1,138 @@
+/*
+ * Copyright (c) 2010-2011 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@svnkit.com
+ */
+package org.tmatesoft.hg.core;
+
+import static org.tmatesoft.hg.internal.DigestHelper.toHexString;
+
+import java.util.Arrays;
+
+
+
+/**
+ * A 20-byte (40 hex characters) hash value that identifies a revision.
+ * @see http://mercurial.selenic.com/wiki/Nodeid
+ *
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ *
+ */
+public final class Nodeid {
+	
+	/**
+	 * nullid, empty root revision.
+	 */
+	public static final Nodeid NULL = new Nodeid(new byte[20], false);
+
+	private final byte[] binaryData; 
+
+	/**
+	 * @param binaryRepresentation - array of exactly 20 bytes
+	 * @param shallClone - true if array is subject to future modification and shall be copied, not referenced 
+	 */
+	public Nodeid(byte[] binaryRepresentation, boolean shallClone) {
+		// 5 int fields => 32 bytes
+		// byte[20] => 48 bytes
+		if (binaryRepresentation == null || binaryRepresentation.length != 20) {
+			throw new IllegalArgumentException();
+		}
+		this.binaryData = shallClone ? binaryRepresentation.clone() : binaryRepresentation;
+	}
+
+	@Override
+	public int hashCode() {
+		// digest (part thereof) seems to be nice candidate for the hashCode
+		byte[] b = binaryData;
+		return b[0] << 24 | (b[1] & 0xFF) << 16 | (b[2] & 0xFF) << 8 | (b[3] & 0xFF);
+	}
+	
+	@Override
+	public boolean equals(Object o) {
+		if (o instanceof Nodeid) {
+			return Arrays.equals(this.binaryData, ((Nodeid) o).binaryData);
+		}
+		return false;
+	}
+
+	public boolean equalsTo(byte[] buf) {
+		return Arrays.equals(this.binaryData, buf);
+	}
+	
+	@Override
+	public String toString() {
+		// XXX may want to output just single 0 for the NULL id?
+		return toHexString(binaryData, 0, binaryData.length);
+	}
+
+	public String shortNotation() {
+		return toHexString(binaryData, 0, 6);
+	}
+	
+	public boolean isNull() {
+		if (this == NULL) {
+			return true;
+		}
+		for (int i = 0; i < 20; i++) {
+			if (this.binaryData[i] != 0) {
+				return false;
+			}
+		}
+		return true;
+	}
+
+	// copy 
+	public byte[] toByteArray() {
+		return binaryData.clone();
+	}
+
+	// primary difference from the constructor is the handling of the NULL id (this method returns the shared constant)
+	// always makes a copy of the array passed
+	public static Nodeid fromBinary(byte[] binaryRepresentation, int offset) {
+		if (binaryRepresentation == null || binaryRepresentation.length - offset < 20) {
+			throw new IllegalArgumentException();
+		}
+		int i = 0;
+		while (i < 20 && binaryRepresentation[offset+i] == 0) i++;
+		if (i == 20) {
+			return NULL;
+		}
+		if (offset == 0 && binaryRepresentation.length == 20) {
+			return new Nodeid(binaryRepresentation, true);
+		}
+		byte[] b = new byte[20]; // create new instance if no other reasonable guesses possible
+		System.arraycopy(binaryRepresentation, offset, b, 0, 20);
+		return new Nodeid(b, false);
+	}
+
+	public static Nodeid fromAscii(byte[] asciiRepresentation, int offset, int length) {
+		if (length != 40) {
+			throw new IllegalArgumentException();
+		}
+		byte[] data = new byte[20];
+		boolean zeroBytes = true;
+		for (int i = 0, j = offset; i < data.length; i++) {
+			int hiNibble = Character.digit(asciiRepresentation[j++], 16);
+			int lowNibble = Character.digit(asciiRepresentation[j++], 16);
+			byte b = (byte) (((hiNibble << 4) | lowNibble) & 0xFF);
+			data[i] = b;
+			zeroBytes = zeroBytes && b == 0;
+		}
+		if (zeroBytes) {
+			return NULL;
+		}
+		return new Nodeid(data, false);
+	}
+}
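For review convenience, a minimal usage sketch of the Nodeid class added above. It is illustrative only and not part of the changeset; the 40-character hex string and the class name NodeidSketch are arbitrary placeholders.

import org.tmatesoft.hg.core.Nodeid;

public class NodeidSketch {

	public static void main(String[] args) {
		// parse a 40-character hex (ASCII) revision string; the value is an arbitrary placeholder
		byte[] ascii = "0123456789abcdef0123456789abcdef01234567".getBytes();
		Nodeid n1 = Nodeid.fromAscii(ascii, 0, 40);
		// round-trip through the 20-byte binary form; fromBinary always copies the array it is given
		Nodeid n2 = Nodeid.fromBinary(n1.toByteArray(), 0);
		System.out.println(n1.equals(n2));        // true
		System.out.println(n1.shortNotation());   // first 6 bytes, i.e. 12 hex characters
		System.out.println(Nodeid.NULL.isNull()); // true
	}
}

The sketch relies only on the public API shown in the hunk: fromAscii/fromBinary for construction, toByteArray for the defensive copy, and NULL/isNull for the empty root revision.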
diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/core/RepositoryFacade.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/core/RepositoryFacade.java	Mon Jan 24 03:14:45 2011 +0100
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2011 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@svnkit.com
+ */
+package org.tmatesoft.hg.core;
+
+import java.io.File;
+
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.Lookup;
+
+/**
+ *
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class RepositoryFacade {
+	private HgRepository repo;
+
+	public RepositoryFacade() {
+	}
+
+	public boolean init() throws Exception /*FIXME RepoInitException*/ {
+		repo = new Lookup().detectFromWorkingDir();
+		return repo != null && !repo.isInvalid();
+	}
+	
+	public boolean initFrom(File repoLocation) throws Exception {
+		repo = new Lookup().detect(repoLocation.getCanonicalPath());
+		return repo != null && !repo.isInvalid();
+	}
+
+	public LogCommand createLogCommand() {
+		return new LogCommand(repo/*, getCommandContext()*/);
+	}
+	public StatusCommand createStatusCommand() {
+		return new StatusCommand(repo/*, getCommandContext()*/);
+	}
+}
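A hedged sketch of how the new RepositoryFacade is meant to be driven, based solely on the methods added above; the repository path and the class name FacadeSketch are placeholders, and LogCommand#execute() is assumed to behave as described in the earlier LogCommand hunk.

import java.io.File;

import org.tmatesoft.hg.core.LogCommand;
import org.tmatesoft.hg.core.RepositoryFacade;

public class FacadeSketch {

	public static void main(String[] args) throws Exception {
		RepositoryFacade facade = new RepositoryFacade();
		// try the working directory first, then fall back to an explicit location (placeholder path)
		if (!facade.init() && !facade.initFrom(new File("/path/to/repo"))) {
			System.err.println("Can't find repository");
			return;
		}
		LogCommand log = facade.createLogCommand();
		// execute() collects changesets into a list (see the LogCommand hunk earlier in this patch)
		System.out.println(log.execute().size() + " changesets");
	}
}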
diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/core/RepositoryTreeWalker.java
--- a/src/org/tmatesoft/hg/core/RepositoryTreeWalker.java	Sun Jan 23 04:06:18 2011 +0100
+++ b/src/org/tmatesoft/hg/core/RepositoryTreeWalker.java	Mon Jan 24 03:14:45 2011 +0100
@@ -16,7 +16,7 @@
  */
 package org.tmatesoft.hg.core;
 
-import static com.tmate.hgkit.ll.HgRepository.TIP;
+import static org.tmatesoft.hg.repo.HgRepository.TIP;
 
 import java.util.ConcurrentModificationException;
 import java.util.LinkedHashMap;
@@ -24,11 +24,10 @@
 import java.util.List;
 
 import org.tmatesoft.hg.core.LogCommand.FileRevision;
+import org.tmatesoft.hg.repo.HgManifest;
+import org.tmatesoft.hg.repo.HgRepository;
 import org.tmatesoft.hg.util.PathPool;
 
-import com.tmate.hgkit.ll.HgManifest;
-import com.tmate.hgkit.ll.HgRepository;
-import com.tmate.hgkit.ll.Nodeid;
 
 /**
  *
diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/core/StatusCommand.java
--- a/src/org/tmatesoft/hg/core/StatusCommand.java	Sun Jan 23 04:06:18 2011 +0100
+++ b/src/org/tmatesoft/hg/core/StatusCommand.java	Mon Jan 24 03:14:45 2011 +0100
@@ -16,19 +16,14 @@
  */
 package org.tmatesoft.hg.core;
 
-import static com.tmate.hgkit.ll.HgRepository.BAD_REVISION;
-import static com.tmate.hgkit.ll.HgRepository.TIP;
-import static com.tmate.hgkit.ll.HgRepository.WORKING_COPY;
+import static org.tmatesoft.hg.repo.HgRepository.BAD_REVISION;
+import static org.tmatesoft.hg.repo.HgRepository.TIP;
+import static org.tmatesoft.hg.repo.HgRepository.WORKING_COPY;
 
 import org.tmatesoft.hg.core.Path.Matcher;
-import org.tmatesoft.hg.util.PathPool;
-
-import com.tmate.hgkit.fs.FileWalker;
-import com.tmate.hgkit.ll.HgRepository;
-import com.tmate.hgkit.ll.LocalHgRepo;
-import com.tmate.hgkit.ll.StatusCollector;
-import com.tmate.hgkit.ll.WorkingCopyStatusCollector;
-import com.tmate.hgkit.ll.StatusCollector.Record;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.StatusCollector;
+import org.tmatesoft.hg.repo.WorkingCopyStatusCollector;
 
 /**
  *
@@ -144,7 +139,7 @@
 //		StatusCollector.Record r = new StatusCollector.Record(); // XXX use own inspector not to collect entries that
 		// are not interesting or do not match name
 		if (endRevision == WORKING_COPY) {
-			WorkingCopyStatusCollector wcsc = new WorkingCopyStatusCollector(repo, ((LocalHgRepo) repo).createWorkingDirWalker());
+			WorkingCopyStatusCollector wcsc = new WorkingCopyStatusCollector(repo);
 			wcsc.setBaseRevisionCollector(sc);
 			wcsc.walk(startRevision, inspector);
 		} else {
diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/core/package.html
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/core/package.html	Mon Jan 24 03:14:45 2011 +0100
@@ -0,0 +1,5 @@
+
+
+High-level API
+
+
\ No newline at end of file
diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/internal/DataAccess.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/DataAccess.java	Mon Jan 24 03:14:45 2011 +0100
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2010 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@svnkit.com
+ */
+package org.tmatesoft.hg.internal;
+
+import java.io.IOException;
+
+/**
+ * Relevant parts of DataInput, with a non-stream nature (seek operation) and an explicit check for end of data.
+ * Convenient skip (+/- bytes).
+ * Primary goal is an effective file read, so that clients don't need to care whether to call a few
+ * distinct readInt() calls or a single readBytes(totalForFewInts) and parse it themselves in an attempt to optimize.
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class DataAccess {
+	public boolean isEmpty() {
+		return true;
+	}
+	// absolute positioning
+	public void seek(long offset) throws IOException {
+		throw new UnsupportedOperationException();
+	}
+	// relative positioning
+	public void skip(int bytes) throws IOException {
+		throw new UnsupportedOperationException();
+	}
+	// shall be called once this object is no longer needed
+	public void done() {
+		// no-op in this empty implementation
+	}
+	public int readInt() throws IOException {
+		byte[] b = new byte[4];
+		readBytes(b, 0, 4);
+		return b[0] << 24 | (b[1] & 0xFF) << 16 | (b[2] & 0xFF) << 8 | (b[3] & 0xFF);
+	}
+	public long readLong() throws IOException {
+		byte[] b = new byte[8];
+		readBytes(b, 0, 8);
+		int i1 = b[0] << 24 | (b[1] & 0xFF) << 16 | (b[2] & 0xFF) << 8 | (b[3] & 0xFF);
+		int i2 = b[4] << 24 | (b[5] & 0xFF) << 16 | (b[6] & 0xFF) << 8 | (b[7] & 0xFF);
+		return ((long) i1) << 32 | ((long) i2 & 0xFFFFFFFFL); // mask needs the long suffix, otherwise sign extension of a negative i2 would corrupt the high word
+	}
+	public void readBytes(byte[] buf, int offset, int length) throws IOException {
+		throw new UnsupportedOperationException();
+	}
+	public byte readByte() throws IOException {
+		throw new UnsupportedOperationException();
+	}
+}
\ No newline at end of file
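To make the DataAccess contract above concrete, a hypothetical in-memory subclass (not part of the changeset; the class name and its array-backed behaviour are assumptions for illustration). With readBytes, seek, skip and isEmpty supplied, the inherited readInt()/readLong() decode big-endian values without further work.

import java.io.IOException;

import org.tmatesoft.hg.internal.DataAccess;

// Hypothetical, for illustration only: backs DataAccess with an in-memory byte array.
class ByteArrayDataAccess extends DataAccess {
	private final byte[] data;
	private int pos = 0;

	ByteArrayDataAccess(byte[] data) {
		this.data = data;
	}

	@Override
	public boolean isEmpty() {
		return pos >= data.length;
	}

	@Override
	public void seek(long offset) {
		pos = (int) offset; // absolute positioning
	}

	@Override
	public void skip(int bytes) {
		pos += bytes; // relative positioning, negative values rewind
	}

	@Override
	public void readBytes(byte[] buf, int offset, int length) throws IOException {
		if (pos + length > data.length) {
			throw new IOException("Attempt to read past end of data");
		}
		System.arraycopy(data, pos, buf, offset, length);
		pos += length;
	}

	@Override
	public byte readByte() throws IOException {
		if (isEmpty()) {
			throw new IOException();
		}
		return data[pos++];
	}
}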
diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/internal/DataAccessProvider.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/DataAccessProvider.java	Mon Jan 24 03:14:45 2011 +0100
@@ -0,0 +1,279 @@
+/*
+ * Copyright (c) 2010-2011 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@svnkit.com
+ */
+package org.tmatesoft.hg.internal;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.MappedByteBuffer;
+import java.nio.channels.FileChannel;
+
+/**
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class DataAccessProvider {
+
+	private final int mapioMagicBoundary;
+	private final int bufferSize;
+
+	public DataAccessProvider() {
+		this(100 * 1024, 8 * 1024);
+	}
+
+	public DataAccessProvider(int mapioBoundary, int regularBufferSize) {
+		mapioMagicBoundary = mapioBoundary;
+		bufferSize = regularBufferSize;
+	}
+
+	public DataAccess create(File f) {
+		if (!f.exists()) {
+			return new DataAccess();
+		}
+		try {
+			FileChannel fc = new FileInputStream(f).getChannel();
+			if (fc.size() > mapioMagicBoundary) {
+				// TESTS: bufLen of 1024 was used to test MemoryMapFileAccess
+				return new MemoryMapFileAccess(fc, fc.size(), mapioMagicBoundary);
+			} else {
+				// XXX once implementation is more or less stable,
+				// may want to try ByteBuffer.allocateDirect() to see
+				// if there's any performance gain. 
+				boolean useDirectBuffer = false;
+				// TESTS: bufferSize of 100 was used to check buffer underflow states when readBytes reads chunks bigger than bufSize 
+				return new FileAccess(fc, fc.size(), bufferSize, useDirectBuffer);
+			}
+		} catch (IOException ex) {
+			// unlikely to happen, we've made sure file exists.
+			ex.printStackTrace(); // FIXME log error
+		}
+		return new DataAccess(); // non-null, empty.
+	}
+
+	// DOESN'T WORK YET 
+	private static class MemoryMapFileAccess extends DataAccess {
+		private FileChannel fileChannel;
+		private final long size;
+		private long position = 0; // always points to buffer's absolute position in the file
+		private final int memBufferSize;
+		private MappedByteBuffer buffer;
+
+		public MemoryMapFileAccess(FileChannel fc, long channelSize, int /*long?*/ bufferSize) {
+			fileChannel = fc;
+			size = channelSize;
+			memBufferSize = bufferSize;
+		}
+
+		@Override
+		public boolean isEmpty() {
+			return position + (buffer == null ? 0 : buffer.position()) >= size;
+		}
+		
+		@Override
+		public void seek(long offset) {
+			assert offset >= 0;
+			// offset may not necessarily be further than current position in the file (e.g. rewind) 
+			if (buffer != null && /*offset is within buffer*/ offset >= position && (offset - position) < buffer.limit()) {
+				buffer.position((int) (offset - position));
+			} else {
+				position = offset;
+				buffer = null;
+			}
+		}
+
+		@Override
+		public void skip(int bytes) throws IOException {
+			assert bytes >= 0;
+			if (buffer == null) {
+				position += bytes;
+				return;
+			}
+			if (buffer.remaining() > bytes) {
+				buffer.position(buffer.position() + bytes);
+			} else {
+				position += buffer.position() + bytes;
+				buffer = null;
+			}
+		}
+
+		private void fill() throws IOException {
+			if (buffer != null) {
+				position += buffer.position(); 
+			}
+			long left = size - position;
+			buffer = fileChannel.map(FileChannel.MapMode.READ_ONLY, position, left < memBufferSize ? left : memBufferSize);
+		}
+
+		@Override
+		public void readBytes(byte[] buf, int offset, int length) throws IOException {
+			if (buffer == null || !buffer.hasRemaining()) {
+				fill();
+			}
+			// XXX in fact, we may try to create a MappedByteBuffer of exactly length size here, and read right away
+			while (length > 0) {
+				int tail = buffer.remaining();
+				if (tail == 0) {
+					throw new IOException();
+				}
+				if (tail >= length) {
+					buffer.get(buf, offset, length);
+				} else {
+					buffer.get(buf, offset, tail);
+					fill();
+				}
+				offset += tail;
+				length -= tail;
+			}
+		}
+
+		@Override
+		public byte readByte() throws IOException {
+			if (buffer == null || !buffer.hasRemaining()) {
+				fill();
+			}
+			if (buffer.hasRemaining()) {
+				return buffer.get();
+			}
+			throw new IOException();
+		}
+
+		@Override
+		public void done() {
+			buffer = null;
+			if (fileChannel != null) {
+				try {
+					fileChannel.close();
+				} catch (IOException ex) {
+					ex.printStackTrace(); // log debug
+				}
+				fileChannel = null;
+			}
+		}
+	}
+
+	// (almost) regular file access - FileChannel and buffers.
+	private static class FileAccess extends DataAccess {
+		private FileChannel fileChannel;
+		private final long size;
+		private ByteBuffer buffer;
+		private long bufferStartInFile = 0; // offset of this.buffer in the file.
+
+		public FileAccess(FileChannel fc, long channelSize, int bufferSizeHint, boolean useDirect) {
+			fileChannel = fc;
+			size = channelSize;
+			final int capacity = size < bufferSizeHint ? (int) size : bufferSizeHint;
+			buffer = useDirect ? ByteBuffer.allocateDirect(capacity) : ByteBuffer.allocate(capacity);
+			buffer.flip(); // or .limit(0) to indicate it's empty
+		}
+		
+		@Override
+		public boolean isEmpty() {
+			return bufferStartInFile + buffer.position() >= size;
+		}
+		
+		@Override
+		public void seek(long offset) throws IOException {
+			if (offset > size) {
+				throw new IllegalArgumentException();
+			}
+			if (offset < bufferStartInFile + buffer.limit() && offset >= bufferStartInFile) {
+				buffer.position((int) (offset - bufferStartInFile));
+			} else {
+				// out of current buffer, invalidate it (force re-read) 
+				// XXX or even re-read it right away?
+				bufferStartInFile = offset;
+				buffer.clear();
+				buffer.limit(0); // or .flip() to indicate we switch to reading
+				fileChannel.position(offset);
+			}
+		}
+
+		@Override
+		public void skip(int bytes) throws IOException {
+			final int newPos = buffer.position() + bytes;
+			if (newPos >= 0 && newPos < buffer.limit()) {
+				// no need to move file pointer, just rewind/seek buffer 
+				buffer.position(newPos);
+			} else {
+				//
+				seek(bufferStartInFile + newPos);
+			}
+		}
+
+		private boolean fill() throws IOException {
+			if (!buffer.hasRemaining()) {
+				bufferStartInFile += buffer.limit();
+				buffer.clear();
+				if (bufferStartInFile < size) { // just in case there'd be any exception on EOF, not -1 
+					fileChannel.read(buffer);
+					// may return -1 when EOF, but isEmpty() will reflect this, hence no explicit support here
+				}
+				buffer.flip();
+			}
+			return buffer.hasRemaining();
+		}
+
+		@Override
+		public void readBytes(byte[] buf, int offset, int length) throws IOException {
+			if (!buffer.hasRemaining()) {
+				fill();
+			}
+			while (length > 0) {
+				int tail = buffer.remaining();
+				if (tail == 0) {
+					throw new IOException(); // shall not happen provided stream contains expected data and no attempts to read past isEmpty() == true are made.
+				}
+				if (tail >= length) {
+					buffer.get(buf, offset, length);
+				} else {
+					buffer.get(buf, offset, tail);
+					fill();
+				}
+				offset += tail;
+				length -= tail;
+			}
+		}
+
+		@Override
+		public byte readByte() throws IOException {
+			if (buffer.hasRemaining()) {
+				return buffer.get();
+			}
+			if (fill()) {
+				return buffer.get();
+			}
+			throw new IOException();
+		}
+
+		@Override
+		public void done() {
+			if (buffer != null) {
+				buffer = null;
+			}
+			if (fileChannel != null) {
+				try {
+					fileChannel.close();
+				} catch (IOException ex) {
+					ex.printStackTrace(); // log debug
+				}
+				fileChannel = null;
+			}
+		}
+	}
+}
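A usage sketch of DataAccessProvider under stated assumptions: the index path is a placeholder, and the memory-mapped branch chosen for files above the ~100KB default boundary is still marked as not working in this changeset.

import java.io.File;

import org.tmatesoft.hg.internal.DataAccess;
import org.tmatesoft.hg.internal.DataAccessProvider;

public class DataAccessSketch {

	public static void main(String[] args) throws Exception {
		// files above the default mapio boundary get MemoryMapFileAccess, smaller ones the buffered FileAccess
		DataAccessProvider dap = new DataAccessProvider();
		DataAccess da = dap.create(new File("/path/to/repo/.hg/store/00changelog.i")); // placeholder path
		try {
			if (!da.isEmpty()) { // create() returns an empty DataAccess when the file is missing
				int versionField = da.readInt(); // first four bytes of a revlog index
				System.out.printf("version field: 0x%08x%n", versionField);
			}
		} finally {
			da.done(); // releases the underlying FileChannel
		}
	}
}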
diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/internal/DigestHelper.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/DigestHelper.java	Mon Jan 24 03:14:45 2011 +0100
@@ -0,0 +1,115 @@
+/*
+ * Copyright (c) 2010-2011 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@svnkit.com
+ */
+package org.tmatesoft.hg.internal;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+
+import org.tmatesoft.hg.core.Nodeid;
+
+
+/**
+ * 
+ * DigestHelper dh;
+ * dh.sha1(...).asHexString();
+ *  or 
+ * dh = dh.sha1(...);
+ * nodeid.equalsTo(dh.asBinary());
+ * 
+ * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class DigestHelper { + private MessageDigest sha1; + private byte[] digest; + + public DigestHelper() { + } + + private MessageDigest getSHA1() { + if (sha1 == null) { + try { + sha1 = MessageDigest.getInstance("SHA-1"); + } catch (NoSuchAlgorithmException ex) { + // could hardly happen, JDK from Sun always has sha1. + ex.printStackTrace(); // FIXME log error + } + } + return sha1; + } + + + public DigestHelper sha1(Nodeid nodeid1, Nodeid nodeid2, byte[] data) { + return sha1(nodeid1.toByteArray(), nodeid2.toByteArray(), data); + } + + // sha1_digest(min(p1,p2) ++ max(p1,p2) ++ final_text) + public DigestHelper sha1(byte[] nodeidParent1, byte[] nodeidParent2, byte[] data) { + MessageDigest alg = getSHA1(); + if ((nodeidParent1[0] & 0x00FF) < (nodeidParent2[0] & 0x00FF)) { + alg.update(nodeidParent1); + alg.update(nodeidParent2); + } else { + alg.update(nodeidParent2); + alg.update(nodeidParent1); + } + digest = alg.digest(data); + assert digest.length == 20; + return this; + } + + public String asHexString() { + if (digest == null) { + throw new IllegalStateException("Shall init with sha1() call first"); + } + return toHexString(digest, 0, digest.length); + } + + // by reference, be careful not to modify (or #clone() if needed) + public byte[] asBinary() { + if (digest == null) { + throw new IllegalStateException("Shall init with sha1() call first"); + } + return digest; + } + + // XXX perhaps, digest functions should throw an exception, as it's caller responsibility to deal with eof, etc + public DigestHelper sha1(InputStream is /*ByteBuffer*/) throws IOException { + MessageDigest alg = getSHA1(); + byte[] buf = new byte[1024]; + int c; + while ((c = is.read(buf)) != -1) { + alg.update(buf, 0, c); + } + digest = alg.digest(); + return this; + } + + public static String toHexString(byte[] data, final int offset, final int count) { + char[] result = new char[count << 1]; + final String hexDigits = "0123456789abcdef"; + final int end = offset+count; + for (int i = offset, j = 0; i < end; i++) { + result[j++] = hexDigits.charAt((data[i] >>> 4) & 0x0F); + result[j++] = hexDigits.charAt(data[i] & 0x0F); + } + return new String(result); + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/internal/Internals.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/Internals.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.internal; + +import static org.tmatesoft.hg.internal.RequiresFile.DOTENCODE; +import static org.tmatesoft.hg.internal.RequiresFile.FNCACHE; +import static org.tmatesoft.hg.internal.RequiresFile.STORE; + +import org.tmatesoft.hg.util.PathRewrite; + +/** + * Fields/members that shall not be visible + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class Internals { + + private int revlogVersion = 0; + private int requiresFlags = 0; + + void setStorageConfig(int version, int flags) { + revlogVersion = version; + requiresFlags = flags; + } + + // XXX perhaps, should keep both fields right here, not in the HgRepository + public PathRewrite buildDataFilesHelper() { + return new StoragePathHelper((requiresFlags & STORE) != 0, (requiresFlags & FNCACHE) != 0, (requiresFlags & DOTENCODE) != 0); + } + + public PathRewrite buildRepositoryFilesHelper() { + if ((requiresFlags & STORE) != 0) { + return new PathRewrite() { + public String rewrite(String path) { + return "store/" + path; + } + }; + } else { + return new PathRewrite() { + public String rewrite(String path) { + //no-op + return path; + } + }; + } + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/internal/RequiresFile.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/RequiresFile.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.internal; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStreamReader; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class RequiresFile { + public static final int STORE = 1; + public static final int FNCACHE = 2; + public static final int DOTENCODE = 4; + + public RequiresFile() { + } + + public void parse(Internals repoImpl, File requiresFile) { + if (!requiresFile.exists()) { + return; + } + try { + boolean revlogv1 = false; + boolean store = false; + boolean fncache = false; + boolean dotencode = false; + BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(requiresFile))); + String line; + while ((line = br.readLine()) != null) { + revlogv1 |= "revlogv1".equals(line); + store |= "store".equals(line); + fncache |= "fncache".equals(line); + dotencode |= "dotencode".equals(line); + } + int flags = 0; + flags += store ? 1 : 0; + flags += fncache ? 2 : 0; + flags += dotencode ? 4 : 0; + repoImpl.setStorageConfig(revlogv1 ? 
1 : 0, flags); + } catch (IOException ex) { + ex.printStackTrace(); // FIXME log + } + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/internal/StoragePathHelper.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/StoragePathHelper.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,135 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.internal; + +import java.util.Arrays; +import java.util.TreeSet; + +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.util.PathRewrite; + +/** + * @see http://mercurial.selenic.com/wiki/CaseFoldingPlan + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +class StoragePathHelper implements PathRewrite { + + private final boolean store; + private final boolean fncache; + private final boolean dotencode; + + public StoragePathHelper(boolean isStore, boolean isFncache, boolean isDotencode) { + store = isStore; + fncache = isFncache; + dotencode = isDotencode; + } + + // FIXME document what path argument is, whether it includes .i or .d, and whether it's 'normalized' (slashes) or not. + // since .hg/store keeps both .i files and files without extension (e.g. fncache), guees, for data == false + // we shall assume path has extension + // FIXME much more to be done, see store.py:_hybridencode + public String rewrite(String path) { + final String STR_STORE = "store/"; + final String STR_DATA = "data/"; + final String STR_DH = "dh/"; + + path = path.replace(".hg/", ".hg.hg/").replace(".i/", ".i.hg/").replace(".d/", ".d.hg/"); + StringBuilder sb = new StringBuilder(path.length() << 1); + if (store || fncache) { + // encodefilename + final String reservedChars = "\\:*?\"<>|"; + // in fact, \\ is unlikely to match, ever - we've replaced all of them already, above. Just regards to store.py + int x; + char[] hexByte = new char[2]; + for (int i = 0; i < path.length(); i++) { + final char ch = path.charAt(i); + if (ch >= 'a' && ch <= 'z') { + sb.append(ch); // POIRAE + } else if (ch >= 'A' && ch <= 'Z') { + sb.append('_'); + sb.append(Character.toLowerCase(ch)); // Perhaps, (char) (((int) ch) + 32)? Even better, |= 0x20? 
+ } else if ( (x = reservedChars.indexOf(ch)) != -1) { + sb.append('~'); + sb.append(toHexByte(reservedChars.charAt(x), hexByte)); + } else if ((ch >= '~' /*126*/ && ch <= 255) || ch < ' ' /*32*/) { + sb.append('~'); + sb.append(toHexByte(ch, hexByte)); + } else if (ch == '_') { + // note, encoding from store.py:_buildencodefun and :_build_lower_encodefun + // differ in the way they process '_' (latter doesn't escape it) + sb.append('_'); + sb.append('_'); + } else { + sb.append(ch); + } + } + // auxencode + if (fncache) { + x = 0; // last segment start + final TreeSet windowsReservedFilenames = new TreeSet(); + windowsReservedFilenames.addAll(Arrays.asList("con prn aux nul com1 com2 com3 com4 com5 com6 com7 com8 com9 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9".split(" "))); + do { + int i = sb.indexOf("/", x); + if (i == -1) { + i = sb.length(); + } + // windows reserved filenames are at least of length 3 + if (i - x >= 3) { + boolean found = false; + if (i-x == 3) { + found = windowsReservedFilenames.contains(sb.subSequence(x, i)); + } else if (sb.charAt(x+3) == '.') { // implicit i-x > 3 + found = windowsReservedFilenames.contains(sb.subSequence(x, x+3)); + } else if (i-x > 4 && sb.charAt(x+4) == '.') { + found = windowsReservedFilenames.contains(sb.subSequence(x, x+4)); + } + if (found) { + sb.setCharAt(x, '~'); + sb.insert(x+1, toHexByte(sb.charAt(x+2), hexByte)); + i += 2; + } + } + if (dotencode && (sb.charAt(x) == '.' || sb.charAt(x) == ' ')) { + sb.insert(x+1, toHexByte(sb.charAt(x), hexByte)); + sb.setCharAt(x, '~'); // setChar *after* charAt/insert to get ~2e, not ~7e for '.' + i += 2; + } + x = i+1; + } while (x < sb.length()); + } + } + final int MAX_PATH_LEN_IN_HGSTORE = 120; + if (fncache && (sb.length() + STR_DATA.length() > MAX_PATH_LEN_IN_HGSTORE)) { + throw HgRepository.notImplemented(); // FIXME digest and fncache use + } + if (store) { + sb.insert(0, STR_STORE + STR_DATA); + } + sb.append(".i"); + return sb.toString(); + } + + private static char[] toHexByte(int ch, char[] buf) { + assert buf.length > 1; + final String hexDigits = "0123456789abcdef"; + buf[0] = hexDigits.charAt((ch & 0x00F0) >>> 4); + buf[1] = hexDigits.charAt(ch & 0x0F); + return buf; + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/repo/Changelog.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/Changelog.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,83 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.repo; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.tmatesoft.hg.core.Nodeid; + + +/** + * Representation of the Mercurial changelog file (list of ChangeSets) + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. 
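+ *
+ * Rough usage sketch (assumes an HgRepository instance and a Changeset.Inspector at hand):
+ * <pre>
+ *   Changelog clog = hgRepo.getChangelog();
+ *   int tip = clog.getRevisionCount() - 1;
+ *   clog.range(Math.max(0, tip - 9), tip, inspector); // e.g. the ten most recent changesets
+ * </pre>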
+ */ +public class Changelog extends Revlog { + + /*package-local*/ Changelog(HgRepository hgRepo, RevlogStream content) { + super(hgRepo, content); + } + + public void all(final Changeset.Inspector inspector) { + range(0, content.revisionCount() - 1, inspector); + } + + public void range(int start, int end, final Changeset.Inspector inspector) { + Revlog.Inspector i = new Revlog.Inspector() { + + public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, byte[] data) { + Changeset cset = Changeset.parse(data, 0, data.length); + // XXX there's no guarantee for Changeset.Callback that distinct instance comes each time, consider instance reuse + inspector.next(revisionNumber, Nodeid.fromBinary(nodeid, 0), cset); + } + }; + content.iterate(start, end, true, i); + } + + public List range(int start, int end) { + final ArrayList rv = new ArrayList(end - start + 1); + Revlog.Inspector i = new Revlog.Inspector() { + + public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, byte[] data) { + Changeset cset = Changeset.parse(data, 0, data.length); + rv.add(cset); + } + }; + content.iterate(start, end, true, i); + return rv; + } + + public void range(final Changeset.Inspector inspector, final int... revisions) { + if (revisions == null || revisions.length == 0) { + return; + } + Revlog.Inspector i = new Revlog.Inspector() { + + public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, byte[] data) { + if (Arrays.binarySearch(revisions, revisionNumber) >= 0) { + Changeset cset = Changeset.parse(data, 0, data.length); + inspector.next(revisionNumber, Nodeid.fromBinary(nodeid, 0), cset); + } + } + }; + Arrays.sort(revisions); + content.iterate(revisions[0], revisions[revisions.length - 1], true, i); + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/repo/Changeset.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/Changeset.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,228 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.repo; + +import java.io.UnsupportedEncodingException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Date; +import java.util.Formatter; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import org.tmatesoft.hg.core.Nodeid; + +/** + * @see mercurial/changelog.py:read() + *
+        format used:
+        nodeid\n        : manifest node in ascii
+        user\n          : user, no \n or \r allowed
+        time tz extra\n : date (time is int or float, timezone is int)
+                        : extra is metadata, encoded and separated by '\0'
+                        : older versions ignore it
+        files\n\n       : files modified by the cset, no \n or \r allowed
+        (.*)            : comment (free text, ideally utf-8)
+
+        changelog v0 doesn't use extra
+ * 
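+ *
+ * For illustration only, a hypothetical raw entry in this format (values are invented; the date
+ * reuses the offset convention of this changeset, seconds east of UTC negated):
+ *
+ *   f6d7a1384e021c8a434353d2d8ccc34d2e08bb2d
+ *   John Doe <john@example.org>
+ *   1295835285 -3600 branch:stable
+ *   src/a/File1.java
+ *   src/a/File2.java
+ *
+ *   fix off-by-one in File1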
+ * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class Changeset implements Cloneable /*for those that would like to keep a copy*/ { + // TODO immutable + private /*final*/ Nodeid manifest; + private String user; + private String comment; + private List files; // unmodifiable collection (otherwise #files() and implicit #clone() shall be revised) + private Date time; + private int timezone; // not sure it's of any use + private Map extras; + + private Changeset() { + } + + public Nodeid manifest() { + return manifest; + } + + public String user() { + return user; + } + + public String comment() { + return comment; + } + + public List files() { + return files; + } + + public Date date() { + return time; + } + + public String dateString() { + StringBuilder sb = new StringBuilder(30); + Formatter f = new Formatter(sb, Locale.US); + f.format("%ta % extras() { + return extras; + } + + public String branch() { + return extras.get("branch"); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("Changeset {"); + sb.append("User: ").append(user).append(", "); + sb.append("Comment: ").append(comment).append(", "); + sb.append("Manifest: ").append(manifest).append(", "); + sb.append("Date: ").append(time).append(", "); + sb.append("Files: ").append(files.size()); + for (String s : files) { + sb.append(", ").append(s); + } + if (extras != null) { + sb.append(", Extra: ").append(extras); + } + sb.append("}"); + return sb.toString(); + } + + @Override + public Changeset clone() { + try { + return (Changeset) super.clone(); + } catch (CloneNotSupportedException ex) { + throw new InternalError(ex.toString()); + } + } + + public static Changeset parse(byte[] data, int offset, int length) { + Changeset rv = new Changeset(); + rv.init(data, offset, length); + return rv; + } + + /*package-local*/ void init(byte[] data, int offset, int length) { + final int bufferEndIndex = offset + length; + final byte lineBreak = (byte) '\n'; + int breakIndex1 = indexOf(data, lineBreak, offset, bufferEndIndex); + if (breakIndex1 == -1) { + throw new IllegalArgumentException("Bad Changeset data"); + } + Nodeid _nodeid = Nodeid.fromAscii(data, 0, breakIndex1); + int breakIndex2 = indexOf(data, lineBreak, breakIndex1+1, bufferEndIndex); + if (breakIndex2 == -1) { + throw new IllegalArgumentException("Bad Changeset data"); + } + String _user = new String(data, breakIndex1+1, breakIndex2 - breakIndex1 - 1); + int breakIndex3 = indexOf(data, lineBreak, breakIndex2+1, bufferEndIndex); + if (breakIndex3 == -1) { + throw new IllegalArgumentException("Bad Changeset data"); + } + String _timeString = new String(data, breakIndex2+1, breakIndex3 - breakIndex2 - 1); + int space1 = _timeString.indexOf(' '); + if (space1 == -1) { + throw new IllegalArgumentException("Bad Changeset data"); + } + int space2 = _timeString.indexOf(' ', space1+1); + if (space2 == -1) { + space2 = _timeString.length(); + } + long unixTime = Long.parseLong(_timeString.substring(0, space1)); // XXX Float, perhaps + int _timezone = Integer.parseInt(_timeString.substring(space1+1, space2)); + // XXX not sure need to add timezone here - I can't figure out whether Hg keeps GMT time, and records timezone just for info, or unixTime is taken local + // on commit and timezone is recorded to adjust it to UTC. + Date _time = new Date(unixTime * 1000); + String _extras = space2 < _timeString.length() ? 
_timeString.substring(space2+1) : null; + Map _extrasMap; + if (_extras == null) { + _extrasMap = Collections.singletonMap("branch", "default"); + } else { + _extrasMap = new HashMap(); + for (String pair : _extras.split("\00")) { + int eq = pair.indexOf(':'); + // FIXME need to decode key/value, @see changelog.py:decodeextra + _extrasMap.put(pair.substring(0, eq), pair.substring(eq+1)); + } + if (!_extrasMap.containsKey("branch")) { + _extrasMap.put("branch", "default"); + } + _extrasMap = Collections.unmodifiableMap(_extrasMap); + } + + // + int lastStart = breakIndex3 + 1; + int breakIndex4 = indexOf(data, lineBreak, lastStart, bufferEndIndex); + ArrayList _files = new ArrayList(5); + while (breakIndex4 != -1 && breakIndex4 + 1 < bufferEndIndex) { + _files.add(new String(data, lastStart, breakIndex4 - lastStart)); + lastStart = breakIndex4 + 1; + if (data[breakIndex4 + 1] == lineBreak) { + // found \n\n + break; + } else { + breakIndex4 = indexOf(data, lineBreak, lastStart, bufferEndIndex); + } + } + if (breakIndex4 == -1 || breakIndex4 >= bufferEndIndex) { + throw new IllegalArgumentException("Bad Changeset data"); + } + String _comment; + try { + _comment = new String(data, breakIndex4+2, bufferEndIndex - breakIndex4 - 2, "UTF-8"); + } catch (UnsupportedEncodingException ex) { + _comment = ""; + throw new IllegalStateException("Could hardly happen"); + } + // change this instance at once, don't leave it partially changes in case of error + this.manifest = _nodeid; + this.user = _user; + this.time = _time; + this.timezone = _timezone; + this.files = Collections.unmodifiableList(_files); + this.comment = _comment; + this.extras = _extrasMap; + } + + private static int indexOf(byte[] src, byte what, int startOffset, int endIndex) { + for (int i = startOffset; i < endIndex; i++) { + if (src[i] == what) { + return i; + } + } + return -1; + } + + public interface Inspector { + // first(), last(), single(). + // + // TODO describe whether cset is new instance each time + void next(int revisionNumber, Nodeid nodeid, Changeset cset); + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/repo/HgBundle.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgBundle.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,160 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.repo; + +import java.io.File; +import java.io.IOException; +import java.util.LinkedList; +import java.util.List; + +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.internal.DataAccess; +import org.tmatesoft.hg.internal.DataAccessProvider; +import org.tmatesoft.hg.internal.DigestHelper; + + +/** + * @see http://mercurial.selenic.com/wiki/BundleFormat + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. 
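+ *
+ * Per the wiki page above, the payload is a changelog group, then a manifest group, then a sequence
+ * of (filename chunk, file revision group) pairs; every group is a list of length-prefixed chunks
+ * terminated by an empty chunk. dump() below walks exactly that structure.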
+ */ +public class HgBundle { + + private final File bundleFile; + private final DataAccessProvider accessProvider; + + public HgBundle(DataAccessProvider dap, File bundle) { + accessProvider = dap; + bundleFile = bundle; + } + + public void changes(HgRepository hgRepo) throws IOException { + DataAccess da = accessProvider.create(bundleFile); + DigestHelper dh = new DigestHelper(); + try { + List changelogGroup = readGroup(da); + if (changelogGroup.isEmpty()) { + throw new IllegalStateException("No changelog group in the bundle"); // XXX perhaps, just be silent and/or log? + } + // XXX in fact, bundle not necessarily starts with the first revision missing in hgRepo + // need to 'scroll' till the last one common. + final Nodeid base = changelogGroup.get(0).firstParent(); + if (!hgRepo.getChangelog().isKnown(base)) { + throw new IllegalArgumentException("unknown parent"); + } + // BundleFormat wiki says: + // Each Changelog entry patches the result of all previous patches + // (the previous, or parent patch of a given patch p is the patch that has a node equal to p's p1 field) + byte[] baseRevContent = hgRepo.getChangelog().content(base); + for (GroupElement ge : changelogGroup) { + byte[] csetContent = RevlogStream.apply(baseRevContent, -1, ge.patches); + dh = dh.sha1(ge.firstParent(), ge.secondParent(), csetContent); // XXX ge may give me access to byte[] content of nodeid directly, perhaps, I don't need DH to be friend of Nodeid? + if (!ge.node().equalsTo(dh.asBinary())) { + throw new IllegalStateException("Integrity check failed on " + bundleFile + ", node:" + ge.node()); + } + Changeset cs = Changeset.parse(csetContent, 0, csetContent.length); + System.out.println(cs.toString()); + baseRevContent = csetContent; + } + } finally { + da.done(); + } + } + + public void dump() throws IOException { + DataAccess da = accessProvider.create(bundleFile); + try { + LinkedList names = new LinkedList(); + if (!da.isEmpty()) { + System.out.println("Changelog group"); + List changelogGroup = readGroup(da); + for (GroupElement ge : changelogGroup) { + System.out.printf(" %s %s %s %s; patches:%d\n", ge.node(), ge.firstParent(), ge.secondParent(), ge.cset(), ge.patches.size()); + } + System.out.println("Manifest group"); + List manifestGroup = readGroup(da); + for (GroupElement ge : manifestGroup) { + System.out.printf(" %s %s %s %s; patches:%d\n", ge.node(), ge.firstParent(), ge.secondParent(), ge.cset(), ge.patches.size()); + } + while (!da.isEmpty()) { + int fnameLen = da.readInt(); + if (fnameLen <= 4) { + break; // null chunk, the last one. 
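+					// the 4-byte chunk length counts the length field itself, so anything <= 4 is an
+					// empty chunk marking the end of the file list (the same convention is behind the
+					// "- 4" below and the "- 84" in readGroup())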
+ } + byte[] fname = new byte[fnameLen - 4]; + da.readBytes(fname, 0, fname.length); + names.add(new String(fname)); + List fileGroup = readGroup(da); + System.out.println(names.getLast()); + for (GroupElement ge : fileGroup) { + System.out.printf(" %s %s %s %s; patches:%d\n", ge.node(), ge.firstParent(), ge.secondParent(), ge.cset(), ge.patches.size()); + } + } + } + System.out.println(names.size()); + for (String s : names) { + System.out.println(s); + } + } finally { + da.done(); + } + } + + private static List readGroup(DataAccess da) throws IOException { + int len = da.readInt(); + LinkedList rv = new LinkedList(); + while (len > 4 && !da.isEmpty()) { + byte[] nb = new byte[80]; + da.readBytes(nb, 0, 80); + int dataLength = len-84; + LinkedList patches = new LinkedList(); + while (dataLength > 0) { + RevlogStream.PatchRecord pr = RevlogStream.PatchRecord.read(da); + patches.add(pr); + dataLength -= pr.len + 12; + } + rv.add(new GroupElement(nb, patches)); + len = da.isEmpty() ? 0 : da.readInt(); + } + return rv; + } + + static class GroupElement { + private byte[] header; // byte[80] takes 120 bytes, 4 Nodeids - 192 + private List patches; + + GroupElement(byte[] fourNodeids, List patchList) { + assert fourNodeids != null && fourNodeids.length == 80; + // patchList.size() > 0 + header = fourNodeids; + patches = patchList; + } + public Nodeid node() { + return Nodeid.fromBinary(header, 0); + } + public Nodeid firstParent() { + return Nodeid.fromBinary(header, 20); + } + public Nodeid secondParent() { + return Nodeid.fromBinary(header, 40); + } + public Nodeid cset() { // cs seems to be changeset + return Nodeid.fromBinary(header, 60); + } + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/repo/HgDataFile.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgDataFile.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,79 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.repo; + +import static org.tmatesoft.hg.repo.HgRepository.TIP; + +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.core.Path; + + + +/** + * ? name:HgFileNode? + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgDataFile extends Revlog { + + // absolute from repo root? + // slashes, unix-style? + // repo location agnostic, just to give info to user, not to access real storage + private final Path path; + + /*package-local*/HgDataFile(HgRepository hgRepo, Path path, RevlogStream content) { + super(hgRepo, content); + this.path = path; + } + + public boolean exists() { + return content != null; // XXX need better impl + } + + public Path getPath() { + return path; // hgRepo.backresolve(this) -> name? 
+ } + + public int length(Nodeid nodeid) { + return content.dataLength(getLocalRevisionNumber(nodeid)); + } + + public byte[] content() { + return content(TIP); + } + + public void history(Changeset.Inspector inspector) { + history(0, content.revisionCount() - 1, inspector); + } + + public void history(int start, int end, Changeset.Inspector inspector) { + if (!exists()) { + throw new IllegalStateException("Can't get history of invalid repository file node"); + } + final int[] commitRevisions = new int[end - start + 1]; + Revlog.Inspector insp = new Revlog.Inspector() { + int count = 0; + + public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, byte[] data) { + commitRevisions[count++] = linkRevision; + } + }; + content.iterate(start, end, false, insp); + getRepo().getChangelog().range(inspector, commitRevisions); + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/repo/HgDirstate.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgDirstate.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,179 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.repo; + +import java.io.File; +import java.io.IOException; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.TreeSet; + +import org.tmatesoft.hg.internal.DataAccess; +import org.tmatesoft.hg.internal.DataAccessProvider; + + +/** + * @see http://mercurial.selenic.com/wiki/DirState + * @see http://mercurial.selenic.com/wiki/FileFormats#dirstate + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgDirstate { + + private final DataAccessProvider accessProvider; + private final File dirstateFile; + private Map normal; + private Map added; + private Map removed; + private Map merged; + + /*package-local*/ HgDirstate() { + // empty instance + accessProvider = null; + dirstateFile = null; + } + + public HgDirstate(DataAccessProvider dap, File dirstate) { + accessProvider = dap; + dirstateFile = dirstate; + } + + private void read() { + normal = added = removed = merged = Collections.emptyMap(); + if (dirstateFile == null || !dirstateFile.exists()) { + return; + } + DataAccess da = accessProvider.create(dirstateFile); + if (da.isEmpty()) { + return; + } + // not sure linked is really needed here, just for ease of debug + normal = new LinkedHashMap(); + added = new LinkedHashMap(); + removed = new LinkedHashMap(); + merged = new LinkedHashMap(); + try { + // XXX skip(40) if we don't need these? 
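+			// per the DirState page referenced above: 40 bytes of parent hashes (2 x 20, binary), then one
+			// record per file: 1-byte state, three 4-byte big-endian ints (mode, size, mtime), a 4-byte length
+			// and that many bytes of name (a copied file comes as "name\0source" within those bytes)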
+ byte[] parents = new byte[40]; + da.readBytes(parents, 0, 40); + parents = null; + do { + final byte state = da.readByte(); + final int fmode = da.readInt(); + final int size = da.readInt(); + final int time = da.readInt(); + final int nameLen = da.readInt(); + String fn1 = null, fn2 = null; + byte[] name = new byte[nameLen]; + da.readBytes(name, 0, nameLen); + for (int i = 0; i < nameLen; i++) { + if (name[i] == 0) { + fn1 = new String(name, 0, i, "UTF-8"); // XXX unclear from documentation what encoding is used there + fn2 = new String(name, i+1, nameLen - i - 1, "UTF-8"); // need to check with different system codepages + break; + } + } + if (fn1 == null) { + fn1 = new String(name); + } + Record r = new Record(fmode, size, time, fn1, fn2); + if (state == 'n') { + normal.put(r.name1, r); + } else if (state == 'a') { + added.put(r.name1, r); + } else if (state == 'r') { + removed.put(r.name1, r); + } else if (state == 'm') { + merged.put(r.name1, r); + } else { + // FIXME log error? + } + } while (!da.isEmpty()); + } catch (IOException ex) { + ex.printStackTrace(); // FIXME log error, clean dirstate? + } finally { + da.done(); + } + } + + // new, modifiable collection + /*package-local*/ TreeSet all() { + read(); + TreeSet rv = new TreeSet(); + @SuppressWarnings("unchecked") + Map[] all = new Map[] { normal, added, removed, merged }; + for (int i = 0; i < all.length; i++) { + for (Record r : all[i].values()) { + rv.add(r.name1); + } + } + return rv; + } + + /*package-local*/ Record checkNormal(String fname) { + return normal.get(fname); + } + + /*package-local*/ Record checkAdded(String fname) { + return added.get(fname); + } + /*package-local*/ Record checkRemoved(String fname) { + return removed.get(fname); + } + /*package-local*/ Record checkMerged(String fname) { + return merged.get(fname); + } + + + + + public void dump() { + read(); + @SuppressWarnings("unchecked") + Map[] all = new Map[] { normal, added, removed, merged }; + char[] x = new char[] {'n', 'a', 'r', 'm' }; + for (int i = 0; i < all.length; i++) { + for (Record r : all[i].values()) { + System.out.printf("%c %3o%6d %30tc\t\t%s", x[i], r.mode, r.size, (long) r.time * 1000, r.name1); + if (r.name2 != null) { + System.out.printf(" --> %s", r.name2); + } + System.out.println(); + } + System.out.println(); + } + } + + /*package-local*/ static class Record { + final int mode; + final int size; + final int time; + final String name1; + final String name2; + + public Record(int fmode, int fsize, int ftime, String name1, String name2) { + mode = fmode; + size = fsize; + time = ftime; + this.name1 = name1; + this.name2 = name2; + + } + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/repo/HgIgnore.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgIgnore.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,89 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.repo; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.io.IOException; +import java.util.Collections; +import java.util.Set; +import java.util.TreeSet; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgIgnore { + + private final HgRepository repo; + private Set entries; + + public HgIgnore(HgRepository localRepo) { + this.repo = localRepo; + } + + private void read() { + entries = Collections.emptySet(); + File hgignoreFile = new File(repo.getRepositoryRoot().getParentFile(), ".hgignore"); + if (!hgignoreFile.exists()) { + return; + } + entries = new TreeSet(); + try { + BufferedReader fr = new BufferedReader(new FileReader(hgignoreFile)); + String line; + while ((line = fr.readLine()) != null) { + // FIXME need to detect syntax:glob and other parameters + entries.add(line.trim()); // shall I account for local paths in the file (i.e. back-slashed on windows)? + } + } catch (IOException ex) { + ex.printStackTrace(); // log warn + } + } + + public void reset() { + // FIXME does anyone really need to clear HgIgnore? Perhaps, repo may return new instance each time, + // which is used throughout invocation and then discarded? + entries = null; + } + + public boolean isIgnored(String path) { + if (entries == null) { + read(); + } + if (entries.contains(path)) { + // easy part + return true; + } + // substrings are memory-friendly + int x = 0, i = path.indexOf('/', 0); + while (i != -1) { + if (entries.contains(path.substring(x, i))) { + return true; + } + // try one with ending slash + if (entries.contains(path.substring(x, i+1))) { // even if i is last index, i+1 is safe here + return true; + } + x = i+1; + i = path.indexOf('/', x); + } + return false; + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/repo/HgManifest.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgManifest.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.repo; + +import org.tmatesoft.hg.core.Nodeid; + + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. 
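+ *
+ * A minimal walk over a single manifest revision might look like (a sketch; the printing is arbitrary):
+ * <pre>
+ *   hgRepo.getManifest().walk(0, 0, new HgManifest.Inspector() {
+ *       public boolean begin(int revision, Nodeid nid) { return true; }
+ *       public boolean next(Nodeid nid, String fname, String flags) {
+ *           System.out.println(fname + (flags == null ? "" : " " + flags));
+ *           return true;
+ *       }
+ *       public boolean end(int revision) { return true; }
+ *   });
+ * </pre>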
+ */ +public class HgManifest extends Revlog { + + /*package-local*/ HgManifest(HgRepository hgRepo, RevlogStream content) { + super(hgRepo, content); + } + + public void walk(int start, int end, final Inspector inspector) { + Revlog.Inspector insp = new Revlog.Inspector() { + + private boolean gtg = true; // good to go + + public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, byte[] data) { + if (!gtg) { + return; + } + gtg = gtg && inspector.begin(revisionNumber, new Nodeid(nodeid, true)); + int i; + String fname = null; + String flags = null; + Nodeid nid = null; + for (i = 0; gtg && i < actualLen; i++) { + int x = i; + for( ; data[i] != '\n' && i < actualLen; i++) { + if (fname == null && data[i] == 0) { + fname = new String(data, x, i - x); + x = i+1; + } + } + if (i < actualLen) { + assert data[i] == '\n'; + int nodeidLen = i - x < 40 ? i-x : 40; + nid = Nodeid.fromAscii(data, x, nodeidLen); + if (nodeidLen + x < i) { + // 'x' and 'l' for executable bits and symlinks? + // hg --debug manifest shows 644 for each regular file in my repo + flags = new String(data, x + nodeidLen, i-x-nodeidLen); + } + gtg = gtg && inspector.next(nid, fname, flags); + } + nid = null; + fname = flags = null; + } + gtg = gtg && inspector.end(revisionNumber); + } + }; + content.iterate(start, end, true, insp); + } + + public interface Inspector { + boolean begin(int revision, Nodeid nid); + boolean next(Nodeid nid, String fname, String flags); + boolean end(int revision); + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/repo/HgRepository.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgRepository.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,185 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.repo; + +import java.io.File; +import java.io.IOException; +import java.lang.ref.SoftReference; +import java.util.HashMap; + +import org.tmatesoft.hg.core.Path; +import org.tmatesoft.hg.internal.DataAccessProvider; +import org.tmatesoft.hg.internal.RequiresFile; +import org.tmatesoft.hg.util.FileWalker; +import org.tmatesoft.hg.util.PathRewrite; + + + +/** + * Shall be as state-less as possible, all the caching happens outside the repo, in commands/walkers + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. 
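+ *
+ * Rough usage sketch (repository location is assumed, exception handling omitted):
+ * <pre>
+ *   HgRepository hgRepo = new Lookup().detect("/path/to/workdir");
+ *   if (!hgRepo.isInvalid()) {
+ *       hgRepo.getChangelog().all(new Changeset.Inspector() {
+ *           public void next(int revisionNumber, Nodeid nodeid, Changeset cset) {
+ *               System.out.println(revisionNumber + ": " + cset.user());
+ *           }
+ *       });
+ *   }
+ * </pre>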
+ */ +public final class HgRepository { + + public static final int TIP = -1; + public static final int BAD_REVISION = Integer.MIN_VALUE; + public static final int WORKING_COPY = -2; + + // temp aux marker method + public static IllegalStateException notImplemented() { + return new IllegalStateException("Not implemented"); + } + + private final File repoDir; // .hg folder + private final String repoLocation; + private final DataAccessProvider dataAccess; + private final PathRewrite normalizePath = new PathRewrite() { + + public String rewrite(String path) { + // TODO handle . and .. (although unlikely to face them from GUI client) + path = path.replace('\\', '/').replace("//", "/"); + if (path.startsWith("/")) { + path = path.substring(1); + } + return path; + } + }; + private final PathRewrite dataPathHelper; + private final PathRewrite repoPathHelper; + + private Changelog changelog; + private HgManifest manifest; + private HgTags tags; + // XXX perhaps, shall enable caching explicitly + private final HashMap> streamsCache = new HashMap>(); + + private final org.tmatesoft.hg.internal.Internals impl = new org.tmatesoft.hg.internal.Internals(); + + HgRepository(String repositoryPath) { + repoDir = null; + repoLocation = repositoryPath; + dataAccess = null; + dataPathHelper = repoPathHelper = null; + } + + HgRepository(File repositoryRoot) throws IOException { + assert ".hg".equals(repositoryRoot.getName()) && repositoryRoot.isDirectory(); + repoDir = repositoryRoot; + repoLocation = repositoryRoot.getParentFile().getCanonicalPath(); + dataAccess = new DataAccessProvider(); + parseRequires(); + dataPathHelper = impl.buildDataFilesHelper(); + repoPathHelper = impl.buildRepositoryFilesHelper(); + } + + + public String getLocation() { + return repoLocation; + } + + public boolean isInvalid() { + return repoDir == null || !repoDir.exists() || !repoDir.isDirectory(); + } + + public Changelog getChangelog() { + if (this.changelog == null) { + String storagePath = repoPathHelper.rewrite("00changelog.i"); + RevlogStream content = resolve(Path.create(storagePath)); + this.changelog = new Changelog(this, content); + } + return this.changelog; + } + + public HgManifest getManifest() { + if (this.manifest == null) { + RevlogStream content = resolve(Path.create(repoPathHelper.rewrite("00manifest.i"))); + this.manifest = new HgManifest(this, content); + } + return this.manifest; + } + + public final HgTags getTags() { + if (tags == null) { + tags = new HgTags(); + } + return tags; + } + + public HgDataFile getFileNode(String path) { + String nPath = normalizePath.rewrite(path); + String storagePath = dataPathHelper.rewrite(nPath); + return getFileNode(Path.create(storagePath)); + } + + public HgDataFile getFileNode(Path path) { + RevlogStream content = resolve(path); + // XXX no content when no file? or HgDataFile.exists() to detect that? How about files that were removed in previous releases? + return new HgDataFile(this, path, content); + } + + public PathRewrite getPathHelper() { // Really need to be public? 
+ return normalizePath; + } + + /*package-local*/ File getRepositoryRoot() { + return repoDir; + } + + // XXX package-local, unless there are cases when required from outside (guess, working dir/revision walkers may hide dirstate access and no public visibility needed) + /*package-local*/ final HgDirstate loadDirstate() { + return new HgDirstate(getDataAccess(), new File(repoDir, "dirstate")); + } + + // package-local, see comment for loadDirstate + /*package-local*/ final HgIgnore loadIgnore() { + return new HgIgnore(this); + } + + /*package-local*/ DataAccessProvider getDataAccess() { + return dataAccess; + } + + // FIXME not sure repository shall create walkers + /*package-local*/ FileWalker createWorkingDirWalker() { + return new FileWalker(repoDir.getParentFile()); + } + + /** + * Perhaps, should be separate interface, like ContentLookup + * path - repository storage path (i.e. one usually with .i or .d) + */ + /*package-local*/ RevlogStream resolve(Path path) { + final SoftReference ref = streamsCache.get(path); + RevlogStream cached = ref == null ? null : ref.get(); + if (cached != null) { + return cached; + } + File f = new File(repoDir, path.toString()); + if (f.exists()) { + RevlogStream s = new RevlogStream(dataAccess, f); + streamsCache.put(path, new SoftReference(s)); + return s; + } + return null; // XXX empty stream instead? + } + + private void parseRequires() { + new RequiresFile().parse(impl, new File(repoDir, "requires")); + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/repo/HgTags.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgTags.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.repo; + +import java.util.Collections; +import java.util.List; + +import org.tmatesoft.hg.core.Nodeid; + +/** + * FIXME Place-holder, implement + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgTags { + + public List tags(Nodeid nid) { + return Collections.emptyList(); + } + + public boolean isTagged(Nodeid nid) { + // TODO implement + return false; + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/repo/Internals.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/Internals.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.repo; + + +/** + * DO NOT USE THIS CLASS, INTENDED FOR TESTING PURPOSES. + * + * Debug helper, to access otherwise restricted (package-local) methods + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + + */ +public class Internals { + + private final HgRepository repo; + + public Internals(HgRepository hgRepo) { + repo = hgRepo; + } + + public void dumpDirstate() { + repo.loadDirstate().dump(); + } + + public boolean[] checkIgnored(String... toCheck) { + HgIgnore ignore = repo.loadIgnore(); + boolean[] rv = new boolean[toCheck.length]; + for (int i = 0; i < toCheck.length; i++) { + rv[i] = ignore.isIgnored(toCheck[i]); + } + return rv; + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/repo/Lookup.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/Lookup.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.repo; + +import java.io.File; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class Lookup { + + public HgRepository detectFromWorkingDir() throws Exception { + return detect(System.getProperty("user.dir")); + } + + public HgRepository detect(String location) throws Exception /*FIXME Exception type, RepoInitException? */ { + File dir = new File(location); + File repository; + do { + repository = new File(dir, ".hg"); + if (repository.exists() && repository.isDirectory()) { + break; + } + repository = null; + dir = dir.getParentFile(); + + } while(dir != null); + if (repository == null) { + return new HgRepository(location); + } + return new HgRepository(repository); + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/repo/Revlog.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/Revlog.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,262 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.repo; + +import static org.tmatesoft.hg.repo.HgRepository.TIP; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Set; + +import org.tmatesoft.hg.core.Nodeid; + + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +abstract class Revlog { + + private final HgRepository hgRepo; + protected final RevlogStream content; + + protected Revlog(HgRepository hgRepo, RevlogStream content) { + if (hgRepo == null) { + throw new IllegalArgumentException(); + } + if (content == null) { + throw new IllegalArgumentException(); + } + this.hgRepo = hgRepo; + this.content = content; + } + + public final HgRepository getRepo() { + return hgRepo; + } + + public int getRevisionCount() { + return content.revisionCount(); + } + + public int getLocalRevisionNumber(Nodeid nid) { + int revision = content.findLocalRevisionNumber(nid); + if (revision == Integer.MIN_VALUE) { + throw new IllegalArgumentException(String.format("%s doesn't represent a revision of %s", nid.toString(), this /*XXX HgDataFile.getPath might be more suitable here*/)); + } + return revision; + } + + // Till now, i follow approach that NULL nodeid is never part of revlog + public boolean isKnown(Nodeid nodeid) { + final int rn = content.findLocalRevisionNumber(nodeid); + if (Integer.MIN_VALUE == rn) { + return false; + } + if (rn < 0 || rn >= content.revisionCount()) { + // Sanity check + throw new IllegalStateException(); + } + return true; + } + + /** + * Access to revision data as is (decompressed, but otherwise unprocessed, i.e. not parsed for e.g. changeset or manifest entries) + * @param nodeid + */ + public byte[] content(Nodeid nodeid) { + return content(getLocalRevisionNumber(nodeid)); + } + + /** + * @param revision - repo-local index of this file change (not a changelog revision number!) + */ + public byte[] content(int revision) { + final byte[][] dataPtr = new byte[1][]; + Revlog.Inspector insp = new Revlog.Inspector() { + public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, byte[] data) { + dataPtr[0] = data; + } + }; + content.iterate(revision, revision, true, insp); + return dataPtr[0]; + } + + /** + * XXX perhaps, return value Nodeid[2] and boolean needNodeids is better (and higher level) API for this query? + * + * @param revision - revision to query parents, or {@link HgRepository#TIP} + * @param parentRevisions - int[2] to get local revision numbers of parents (e.g. 
{6, -1}) + * @param parent1 - byte[20] or null, if parent's nodeid is not needed + * @param parent2 - byte[20] or null, if second parent's nodeid is not needed + * @return + */ + public void parents(int revision, int[] parentRevisions, byte[] parent1, byte[] parent2) { + if (revision != TIP && !(revision >= 0 && revision < content.revisionCount())) { + throw new IllegalArgumentException(String.valueOf(revision)); + } + if (parentRevisions == null || parentRevisions.length < 2) { + throw new IllegalArgumentException(String.valueOf(parentRevisions)); + } + if (parent1 != null && parent1.length < 20) { + throw new IllegalArgumentException(parent1.toString()); + } + if (parent2 != null && parent2.length < 20) { + throw new IllegalArgumentException(parent2.toString()); + } + class ParentCollector implements Revlog.Inspector { + public int p1 = -1; + public int p2 = -1; + public byte[] nodeid; + + public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, byte[] data) { + p1 = parent1Revision; + p2 = parent2Revision; + this.nodeid = new byte[20]; + // nodeid arg now comes in 32 byte from (as in file format description), however upper 12 bytes are zeros. + System.arraycopy(nodeid, nodeid.length > 20 ? nodeid.length - 20 : 0, this.nodeid, 0, 20); + } + }; + ParentCollector pc = new ParentCollector(); + content.iterate(revision, revision, false, pc); + parentRevisions[0] = pc.p1; + parentRevisions[1] = pc.p2; + if (parent1 != null) { + if (parentRevisions[0] == -1) { + Arrays.fill(parent1, 0, 20, (byte) 0); + } else { + content.iterate(parentRevisions[0], parentRevisions[0], false, pc); + System.arraycopy(pc.nodeid, 0, parent1, 0, 20); + } + } + if (parent2 != null) { + if (parentRevisions[1] == -1) { + Arrays.fill(parent2, 0, 20, (byte) 0); + } else { + content.iterate(parentRevisions[1], parentRevisions[1], false, pc); + System.arraycopy(pc.nodeid, 0, parent2, 0, 20); + } + } + } + + // FIXME byte[] data might be too expensive, for few usecases it may be better to have intermediate Access object (when we don't need full data + // instantly - e.g. calculate hash, or comparing two revisions + // XXX seems that RevlogStream is better place for this class. + public interface Inspector { + // XXX boolean retVal to indicate whether to continue? + // TODO specify nodeid and data length, and reuse policy (i.e. if revlog stream doesn't reuse nodeid[] for each call) + void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[/*20*/] nodeid, byte[] data); + } + + /* + * XXX think over if it's better to do either: + * pw = getChangelog().new ParentWalker(); pw.init() and pass pw instance around as needed + * or + * add Revlog#getParentWalker(), static class, make cons() and #init package-local, and keep SoftReference to allow walker reuse. 
+ * + * and yes, walker is not a proper name + */ + public final class ParentWalker { + private Map firstParent; + private Map secondParent; + private Set allNodes; + + public ParentWalker() { + firstParent = secondParent = Collections.emptyMap(); + allNodes = Collections.emptySet(); + } + + public void init() { + final RevlogStream stream = Revlog.this.content; + final int revisionCount = stream.revisionCount(); + firstParent = new HashMap(revisionCount); + secondParent = new HashMap(firstParent.size() >> 1); // assume branches/merges are less frequent + allNodes = new LinkedHashSet(); + + Inspector insp = new Inspector() { + final Nodeid[] sequentialRevisionNodeids = new Nodeid[revisionCount]; + int ix = 0; + public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, byte[] data) { + if (ix != revisionNumber) { + // XXX temp code, just to make sure I understand what's going on here + throw new IllegalStateException(); + } + if (parent1Revision >= revisionNumber || parent2Revision >= revisionNumber) { + throw new IllegalStateException(); // sanity, revisions are sequential + } + final Nodeid nid = new Nodeid(nodeid, true); + sequentialRevisionNodeids[ix++] = nid; + allNodes.add(nid); + if (parent1Revision != -1) { + firstParent.put(nid, sequentialRevisionNodeids[parent1Revision]); + if (parent2Revision != -1) { + secondParent.put(nid, sequentialRevisionNodeids[parent2Revision]); + } + } + } + }; + stream.iterate(0, -1, false, insp); + } + + public Set allNodes() { + return Collections.unmodifiableSet(allNodes); + } + + // FIXME need to decide whether Nodeid(00 * 20) is always known or not + public boolean knownNode(Nodeid nid) { + return allNodes.contains(nid); + } + + // null if none + public Nodeid firstParent(Nodeid nid) { + return firstParent.get(nid); + } + + // never null, Nodeid.NULL if none known + public Nodeid safeFirstParent(Nodeid nid) { + Nodeid rv = firstParent(nid); + return rv == null ? Nodeid.NULL : rv; + } + + public Nodeid secondParent(Nodeid nid) { + return secondParent.get(nid); + } + + public Nodeid safeSecondParent(Nodeid nid) { + Nodeid rv = secondParent(nid); + return rv == null ? Nodeid.NULL : rv; + } + + public boolean appendParentsOf(Nodeid nid, Collection c) { + Nodeid p1 = firstParent(nid); + boolean modified = false; + if (p1 != null) { + modified = c.add(p1); + Nodeid p2 = secondParent(nid); + if (p2 != null) { + modified = c.add(p2) || modified; + } + } + return modified; + } + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/repo/RevlogStream.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/RevlogStream.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,376 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.repo; + +import static org.tmatesoft.hg.repo.HgRepository.TIP; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; +import java.util.zip.DataFormatException; +import java.util.zip.Inflater; + +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.internal.DataAccess; +import org.tmatesoft.hg.internal.DataAccessProvider; + + +/** + * XXX move to .internal? + * ? Single RevlogStream per file per repository with accessor to record access session (e.g. with back/forward operations), + * or numerous RevlogStream with separate representation of the underlaying data (cached, lazy ChunkStream)? + * + * @see http://mercurial.selenic.com/wiki/Revlog + * @see http://mercurial.selenic.com/wiki/RevlogNG + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class RevlogStream { + + private List index; // indexed access highly needed + private boolean inline = false; + private final File indexFile; + private final DataAccessProvider dataAccess; + + // if we need anything else from HgRepo, might replace DAP parameter with HgRepo and query it for DAP. + RevlogStream(DataAccessProvider dap, File indexFile) { + this.dataAccess = dap; + this.indexFile = indexFile; + } + + /*package*/ DataAccess getIndexStream() { + return dataAccess.create(indexFile); + } + + /*package*/ DataAccess getDataStream() { + final String indexName = indexFile.getName(); + File dataFile = new File(indexFile.getParentFile(), indexName.substring(0, indexName.length() - 1) + "d"); + return dataAccess.create(dataFile); + } + + public int revisionCount() { + initOutline(); + return index.size(); + } + + public int dataLength(int revision) { + // XXX in fact, use of iterate() instead of this implementation may be quite reasonable. + // + final int indexSize = revisionCount(); + DataAccess daIndex = getIndexStream(); // XXX may supply a hint that I'll need really few bytes of data (although at some offset) + if (revision == TIP) { + revision = indexSize - 1; + } + try { + int recordOffset = inline ? (int) index.get(revision).offset : revision * REVLOGV1_RECORD_SIZE; + daIndex.seek(recordOffset + 12); // 6+2+4 + int actualLen = daIndex.readInt(); + return actualLen; + } catch (IOException ex) { + ex.printStackTrace(); // log error. FIXME better handling + throw new IllegalStateException(ex); + } finally { + daIndex.done(); + } + } + + // Perhaps, RevlogStream should be limited to use of plain int revisions for access, + // while Nodeids should be kept on the level up, in Revlog. Guess, Revlog better keep + // map of nodeids, and once this comes true, we may get rid of this method. 
+ // Unlike its counterpart, Revlog#getLocalRevisionNumber, doesn't fail with exception if node not found, + // returns a predefined constant instead + /*package-local*/ int findLocalRevisionNumber(Nodeid nodeid) { + // XXX this one may be implemented with iterate() once there's mechanism to stop iterations + final int indexSize = revisionCount(); + DataAccess daIndex = getIndexStream(); + try { + byte[] nodeidBuf = new byte[20]; + for (int i = 0; i < indexSize; i++) { + daIndex.skip(8); + int compressedLen = daIndex.readInt(); + daIndex.skip(20); + daIndex.readBytes(nodeidBuf, 0, 20); + if (nodeid.equalsTo(nodeidBuf)) { + return i; + } + daIndex.skip(inline ? 12 + compressedLen : 12); + } + } catch (IOException ex) { + ex.printStackTrace(); // log error. FIXME better handling + throw new IllegalStateException(ex); + } finally { + daIndex.done(); + } + return Integer.MIN_VALUE; + } + + + private final int REVLOGV1_RECORD_SIZE = 64; + + // should be possible to use TIP, ALL, or -1, -2, -n notation of Hg + // ? boolean needsNodeid + public void iterate(int start, int end, boolean needData, Revlog.Inspector inspector) { + initOutline(); + final int indexSize = index.size(); + if (indexSize == 0) { + return; + } + if (end == TIP) { + end = indexSize - 1; + } + if (start == TIP) { + start = indexSize - 1; + } + if (start < 0 || start >= indexSize) { + throw new IllegalArgumentException("Bad left range boundary " + start); + } + if (end < start || end >= indexSize) { + throw new IllegalArgumentException("Bad right range boundary " + end); + } + // XXX may cache [start .. end] from index with a single read (pre-read) + + DataAccess daIndex = null, daData = null; + daIndex = getIndexStream(); + if (needData && !inline) { + daData = getDataStream(); + } + try { + byte[] nodeidBuf = new byte[20]; + byte[] lastData = null; + int i; + boolean extraReadsToBaseRev = false; + if (needData && index.get(start).baseRevision < start) { + i = index.get(start).baseRevision; + extraReadsToBaseRev = true; + } else { + i = start; + } + + daIndex.seek(inline ? 
(int) index.get(i).offset : i * REVLOGV1_RECORD_SIZE); + for (; i <= end; i++ ) { + long l = daIndex.readLong(); + @SuppressWarnings("unused") + long offset = l >>> 16; + @SuppressWarnings("unused") + int flags = (int) (l & 0X0FFFF); + int compressedLen = daIndex.readInt(); + int actualLen = daIndex.readInt(); + int baseRevision = daIndex.readInt(); + int linkRevision = daIndex.readInt(); + int parent1Revision = daIndex.readInt(); + int parent2Revision = daIndex.readInt(); + // Hg has 32 bytes here, uses 20 for nodeid, and keeps 12 last bytes empty + daIndex.readBytes(nodeidBuf, 0, 20); + daIndex.skip(12); + byte[] data = null; + if (needData) { + byte[] dataBuf = new byte[compressedLen]; + if (inline) { + daIndex.readBytes(dataBuf, 0, compressedLen); + } else { + daData.seek(index.get(i).offset); + daData.readBytes(dataBuf, 0, compressedLen); + } + if (dataBuf[0] == 0x78 /* 'x' */) { + try { + Inflater zlib = new Inflater(); // XXX Consider reuse of Inflater, and/or stream alternative + zlib.setInput(dataBuf, 0, compressedLen); + byte[] result = new byte[actualLen*2]; // FIXME need to use zlib.finished() instead + int resultLen = zlib.inflate(result); + zlib.end(); + data = new byte[resultLen]; + System.arraycopy(result, 0, data, 0, resultLen); + } catch (DataFormatException ex) { + ex.printStackTrace(); + data = new byte[0]; // FIXME need better failure strategy + } + } else if (dataBuf[0] == 0x75 /* 'u' */) { + data = new byte[dataBuf.length - 1]; + System.arraycopy(dataBuf, 1, data, 0, data.length); + } else { + // XXX Python impl in fact throws exception when there's not 'x', 'u' or '0' + // but I don't see reason not to return data as is + data = dataBuf; + } + // XXX + if (baseRevision != i) { // XXX not sure if this is the right way to detect a patch + // this is a patch + LinkedList patches = new LinkedList(); + int patchElementIndex = 0; + do { + PatchRecord pr = PatchRecord.read(data, patchElementIndex); + patches.add(pr); + patchElementIndex += 12 + pr.len; + } while (patchElementIndex < data.length); + // + byte[] baseRevContent = lastData; + data = apply(baseRevContent, actualLen, patches); + } + } else { + if (inline) { + daIndex.skip(compressedLen); + } + } + if (!extraReadsToBaseRev || i >= start) { + inspector.next(i, actualLen, baseRevision, linkRevision, parent1Revision, parent2Revision, nodeidBuf, data); + } + lastData = data; + } + } catch (IOException ex) { + throw new IllegalStateException(ex); // FIXME need better handling + } finally { + daIndex.done(); + if (daData != null) { + daData.done(); + } + } + } + + private void initOutline() { + if (index != null && !index.isEmpty()) { + return; + } + ArrayList res = new ArrayList(); + DataAccess da = getIndexStream(); + try { + int versionField = da.readInt(); + da.readInt(); // just to skip next 2 bytes of offset + flags + final int INLINEDATA = 1 << 16; + inline = (versionField & INLINEDATA) != 0; + long offset = 0; // first offset is always 0, thus Hg uses it for other purposes + while(true) { + int compressedLen = da.readInt(); + // 8+4 = 12 bytes total read here + @SuppressWarnings("unused") + int actualLen = da.readInt(); + int baseRevision = da.readInt(); + // 12 + 8 = 20 bytes read here +// int linkRevision = di.readInt(); +// int parent1Revision = di.readInt(); +// int parent2Revision = di.readInt(); +// byte[] nodeid = new byte[32]; + if (inline) { + res.add(new IndexEntry(offset + REVLOGV1_RECORD_SIZE * res.size(), baseRevision)); + da.skip(3*4 + 32 + compressedLen); // Check: 44 (skip) + 20 (read) = 64 
(total RevlogNG record size) + } else { + res.add(new IndexEntry(offset, baseRevision)); + da.skip(3*4 + 32); + } + if (da.isEmpty()) { + // fine, done then + res.trimToSize(); + index = res; + break; + } else { + // start reading next record + long l = da.readLong(); + offset = l >>> 16; + } + } + } catch (IOException ex) { + ex.printStackTrace(); // log error + // too bad, no outline then. + index = Collections.emptyList(); + } finally { + da.done(); + } + + } + + + // perhaps, package-local or protected, if anyone else from low-level needs them + // XXX think over if we should keep offset in case of separate data file - we read the field anyway. Perhaps, distinct entry classes for Inline and non-inline indexes? + private static class IndexEntry { + public final long offset; // for separate .i and .d - copy of index record entry, for inline index - actual offset of the record in the .i file (record entry + revision * record size)) + //public final int length; // data past fixed record (need to decide whether including header size or not), and whether length is of compressed data or not + public final int baseRevision; + + public IndexEntry(long o, int baseRev) { + offset = o; + baseRevision = baseRev; + } + } + + // mpatch.c : apply() + // FIXME need to implement patch merge (fold, combine, gather and discard from aforementioned mpatch.[c|py]), also see Revlog and Mercurial PDF + /*package-local for HgBundle; until moved to better place*/static byte[] apply(byte[] baseRevisionContent, int outcomeLen, List patch) { + int last = 0, destIndex = 0; + if (outcomeLen == -1) { + outcomeLen = baseRevisionContent.length; + for (PatchRecord pr : patch) { + outcomeLen += pr.start - last + pr.len; + last = pr.end; + } + outcomeLen -= last; + last = 0; + } + byte[] rv = new byte[outcomeLen]; + for (PatchRecord pr : patch) { + System.arraycopy(baseRevisionContent, last, rv, destIndex, pr.start-last); + destIndex += pr.start - last; + System.arraycopy(pr.data, 0, rv, destIndex, pr.data.length); + destIndex += pr.data.length; + last = pr.end; + } + System.arraycopy(baseRevisionContent, last, rv, destIndex, baseRevisionContent.length - last); + return rv; + } + + // @see http://mercurial.selenic.com/wiki/BundleFormat, in Changelog group description + /*package-local*/ static class PatchRecord { // copy of struct frag from mpatch.c + /* + Given there are pr1 and pr2: + pr1.start to pr1.end will be replaced with pr's data (of pr1.len) + pr1.end to pr2.start gets copied from base + */ + int start, end, len; + byte[] data; + + // TODO consider PatchRecord that only records data position (absolute in data source), and acquires data as needed + private PatchRecord(int p1, int p2, int length, byte[] src) { + start = p1; + end = p2; + len = length; + data = src; + } + + /*package-local*/ static PatchRecord read(byte[] data, int offset) { + final int x = offset; // shorthand + int p1 = ((data[x] & 0xFF)<< 24) | ((data[x+1] & 0xFF) << 16) | ((data[x+2] & 0xFF) << 8) | (data[x+3] & 0xFF); + int p2 = ((data[x+4] & 0xFF) << 24) | ((data[x+5] & 0xFF) << 16) | ((data[x+6] & 0xFF) << 8) | (data[x+7] & 0xFF); + int len = ((data[x+8] & 0xFF) << 24) | ((data[x+9] & 0xFF) << 16) | ((data[x+10] & 0xFF) << 8) | (data[x+11] & 0xFF); + byte[] dataCopy = new byte[len]; + System.arraycopy(data, x+12, dataCopy, 0, len); + return new PatchRecord(p1, p2, len, dataCopy); + } + + /*package-local*/ static PatchRecord read(DataAccess da) throws IOException { + int p1 = da.readInt(); + int p2 = da.readInt(); + int len = da.readInt(); + 
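+			// The three ints just read are the 12-byte hunk header described on the BundleFormat wiki page
+			// linked above: the hunk replaces base revision bytes [start, end) with the 'len' bytes of data
+			// that follow immediately after the header.
+			// A small worked example of how apply() treats such a record (worked out from apply() above,
+			// not quoted from Hg): base content "abcdef" with a single hunk start=2, end=4, len=2, data="XY"
+			// copies base[0..2)="ab", emits "XY", then copies the tail base[4..6)="ef" -> "abXYef".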
byte[] src = new byte[len]; + da.readBytes(src, 0, len); + return new PatchRecord(p1, p2, len, src); + } + + + } +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/repo/StatusCollector.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/StatusCollector.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,352 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.repo; + +import static org.tmatesoft.hg.repo.HgRepository.BAD_REVISION; +import static org.tmatesoft.hg.repo.HgRepository.TIP; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.TreeSet; + +import org.tmatesoft.hg.core.Nodeid; + + +/** + * RevisionWalker? + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class StatusCollector { + + private final HgRepository repo; + private final Map cache; // sparse array, in fact + + public StatusCollector(HgRepository hgRepo) { + this.repo = hgRepo; + cache = new HashMap(); + ManifestRevisionInspector emptyFakeState = new ManifestRevisionInspector(-1, -1); + emptyFakeState.begin(-1, null); + emptyFakeState.end(-1); // FIXME HgRepo.TIP == -1 as well, need to distinguish fake "prior to first" revision from "the very last" + cache.put(-1, emptyFakeState); + } + + public HgRepository getRepo() { + return repo; + } + + private ManifestRevisionInspector get(int rev) { + ManifestRevisionInspector i = cache.get(rev); + if (i == null) { + i = new ManifestRevisionInspector(rev, rev); + cache.put(rev, i); + repo.getManifest().walk(rev, rev, i); + } + return i; + } + + /*package-local*/ ManifestRevisionInspector raw(int rev) { + return get(rev); + } + + // hg status --change + public void change(int rev, Inspector inspector) { + int[] parents = new int[2]; + repo.getChangelog().parents(rev, parents, null, null); + walk(parents[0], rev, inspector); + } + + // I assume revision numbers are the same for changelog and manifest - here + // user would like to pass changelog revision numbers, and I use them directly to walk manifest. + // if this assumption is wrong, fix this (lookup manifest revisions from changeset). + public void walk(int rev1, int rev2, Inspector inspector) { + if (rev1 == rev2) { + throw new IllegalArgumentException(); + } + if (inspector == null) { + throw new IllegalArgumentException(); + } + if (inspector instanceof Record) { + ((Record) inspector).init(rev1, rev2, this); + } + if (rev1 == TIP) { + rev1 = repo.getManifest().getRevisionCount() - 1; + } + if (rev2 == TIP) { + rev2 = repo.getManifest().getRevisionCount() - 1; // XXX add Revlog.tip() func ? 
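+		// The comparison below is essentially a set difference of the two manifests: files present only in
+		// rev2 are reported added, files present only in rev1 are reported removed, and files present in
+		// both are clean or modified depending on whether their nodeid and flags match. Working directory
+		// state is out of scope here and is handled by WorkingCopyStatusCollector.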
+ } + // in fact, rev1 and rev2 are often next (or close) to each other, + // thus, we can optimize Manifest reads here (manifest.walk(rev1, rev2)) + ManifestRevisionInspector r1, r2; + if (!cache.containsKey(rev1) && !cache.containsKey(rev2) && Math.abs(rev1 - rev2) < 5 /*subjective equivalent of 'close enough'*/) { + int minRev = rev1 < rev2 ? rev1 : rev2; + int maxRev = minRev == rev1 ? rev2 : rev1; + r1 = r2 = new ManifestRevisionInspector(minRev, maxRev); + for (int i = minRev; i <= maxRev; i++) { + cache.put(i, r1); + } + repo.getManifest().walk(minRev, maxRev, r1); + } else { + r1 = get(rev1); + r2 = get(rev2); + } + + TreeSet r1Files = new TreeSet(r1.files(rev1)); + for (String fname : r2.files(rev2)) { + if (r1Files.remove(fname)) { + Nodeid nidR1 = r1.nodeid(rev1, fname); + Nodeid nidR2 = r2.nodeid(rev2, fname); + String flagsR1 = r1.flags(rev1, fname); + String flagsR2 = r2.flags(rev2, fname); + if (nidR1.equals(nidR2) && ((flagsR2 == null && flagsR1 == null) || flagsR2.equals(flagsR1))) { + inspector.clean(fname); + } else { + inspector.modified(fname); + } + } else { + inspector.added(fname); + } + } + for (String left : r1Files) { + inspector.removed(left); + } + // inspector.done() if useful e.g. in UI client + } + + public Record status(int rev1, int rev2) { + Record rv = new Record(); + walk(rev1, rev2, rv); + return rv; + } + + public interface Inspector { + void modified(String fname); + void added(String fname); + // XXX need to specify whether StatusCollector invokes added() along with copied or not! + void copied(String fnameOrigin, String fnameAdded); // if copied files of no interest, should delegate to self.added(fnameAdded); + void removed(String fname); + void clean(String fname); + void missing(String fname); // aka deleted (tracked by Hg, but not available in FS any more + void unknown(String fname); // not tracked + void ignored(String fname); + } + + // XXX for r1..r2 status, only modified, added, removed (and perhaps, clean) make sense + // XXX Need to specify whether copy targets are in added or not (@see Inspector#copied above) + public static class Record implements Inspector { + private List modified, added, removed, clean, missing, unknown, ignored; + private Map copied; + + private int startRev, endRev; + private StatusCollector statusHelper; + + // XXX StatusCollector may additionally initialize Record instance to speed lookup of changed file revisions + // here I need access to ManifestRevisionInspector via #raw(). Perhaps, non-static class (to get + // implicit reference to StatusCollector) may be better? 
+ // Since users may want to reuse Record instance we've once created (and initialized), we need to + // ensure functionality is correct for each/any call (#walk checks instanceof Record and fixes it up) + // Perhaps, distinct helper (sc.getRevisionHelper().nodeid(fname)) would be better, just not clear + // how to supply [start..end] values there easily + /*package-local*/void init(int startRevision, int endRevision, StatusCollector self) { + startRev = startRevision; + endRev = endRevision; + statusHelper = self; + } + + public Nodeid nodeidBeforeChange(String fname) { + if (statusHelper == null || startRev == BAD_REVISION) { + return null; + } + if ((modified == null || !modified.contains(fname)) && (removed == null || !removed.contains(fname))) { + return null; + } + return statusHelper.raw(startRev).nodeid(startRev, fname); + } + public Nodeid nodeidAfterChange(String fname) { + if (statusHelper == null || endRev == BAD_REVISION) { + return null; + } + if ((modified == null || !modified.contains(fname)) && (added == null || !added.contains(fname))) { + return null; + } + return statusHelper.raw(endRev).nodeid(endRev, fname); + } + + public List getModified() { + return proper(modified); + } + + public List getAdded() { + return proper(added); + } + + public List getRemoved() { + return proper(removed); + } + + public Map getCopied() { + if (copied == null) { + return Collections.emptyMap(); + } + return Collections.unmodifiableMap(copied); + } + + public List getClean() { + return proper(clean); + } + + public List getMissing() { + return proper(missing); + } + + public List getUnknown() { + return proper(unknown); + } + + public List getIgnored() { + return proper(ignored); + } + + private List proper(List l) { + if (l == null) { + return Collections.emptyList(); + } + return Collections.unmodifiableList(l); + } + + // + // + + public void modified(String fname) { + modified = doAdd(modified, fname); + } + + public void added(String fname) { + added = doAdd(added, fname); + } + + public void copied(String fnameOrigin, String fnameAdded) { + if (copied == null) { + copied = new LinkedHashMap(); + } + added(fnameAdded); + copied.put(fnameAdded, fnameOrigin); + } + + public void removed(String fname) { + removed = doAdd(removed, fname); + } + + public void clean(String fname) { + clean = doAdd(clean, fname); + } + + public void missing(String fname) { + missing = doAdd(missing, fname); + } + + public void unknown(String fname) { + unknown = doAdd(unknown, fname); + } + + public void ignored(String fname) { + ignored = doAdd(ignored, fname); + } + + private static List doAdd(List l, String s) { + if (l == null) { + l = new LinkedList(); + } + l.add(s); + return l; + } + } + + // XXX in fact, indexed access brings more trouble than benefits, get rid of it? Distinct instance per revision is good enough + public /*XXX private, actually. 
Made public unless repo.statusLocal finds better place*/ static final class ManifestRevisionInspector implements HgManifest.Inspector { + private final HashMap[] idsMap; + private final HashMap[] flagsMap; + private final int baseRevision; + private int r = -1; // cursor + + /** + * [minRev, maxRev] + * [-1,-1] also accepted (for fake empty instance) + * @param minRev - inclusive + * @param maxRev - inclusive + */ + @SuppressWarnings("unchecked") + public ManifestRevisionInspector(int minRev, int maxRev) { + baseRevision = minRev; + int range = maxRev - minRev + 1; + idsMap = new HashMap[range]; + flagsMap = new HashMap[range]; + } + + public Collection files(int rev) { + if (rev < baseRevision || rev >= baseRevision + idsMap.length) { + throw new IllegalArgumentException(); + } + return idsMap[rev - baseRevision].keySet(); + } + + public Nodeid nodeid(int rev, String fname) { + if (rev < baseRevision || rev >= baseRevision + idsMap.length) { + throw new IllegalArgumentException(); + } + return idsMap[rev - baseRevision].get(fname); + } + + public String flags(int rev, String fname) { + if (rev < baseRevision || rev >= baseRevision + idsMap.length) { + throw new IllegalArgumentException(); + } + return flagsMap[rev - baseRevision].get(fname); + } + + // + + public boolean next(Nodeid nid, String fname, String flags) { + idsMap[r].put(fname, nid); + flagsMap[r].put(fname, flags); + return true; + } + + public boolean end(int revision) { + assert revision == r + baseRevision; + r = -1; + return revision+1 < baseRevision + idsMap.length; + } + + public boolean begin(int revision, Nodeid nid) { + if (revision < baseRevision || revision >= baseRevision + idsMap.length) { + throw new IllegalArgumentException(); + } + r = revision - baseRevision; + idsMap[r] = new HashMap(); + flagsMap[r] = new HashMap(); + return true; + } + } + +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/repo/WorkingCopyStatusCollector.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/WorkingCopyStatusCollector.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,233 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.repo; + +import static org.tmatesoft.hg.repo.HgRepository.BAD_REVISION; +import static org.tmatesoft.hg.repo.HgRepository.TIP; + +import java.io.BufferedInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Collections; +import java.util.Set; +import java.util.TreeSet; + +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.util.FileWalker; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. 
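+ *
+ * A rough usage sketch (mirrors what TestStatus does; not a stable API yet):
+ * <pre>
+ *   HgRepository repo = new Lookup().detectFromWorkingDir();
+ *   WorkingCopyStatusCollector wcc = new WorkingCopyStatusCollector(repo);
+ *   StatusCollector.Record record = wcc.status(HgRepository.TIP);
+ *   // record.getModified(), record.getAdded(), record.getRemoved(), ... describe the working copy
+ * </pre>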
+ */ +public class WorkingCopyStatusCollector { + + private final HgRepository repo; + private final FileWalker repoWalker; + private HgDirstate dirstate; + private StatusCollector baseRevisionCollector; + + public WorkingCopyStatusCollector(HgRepository hgRepo) { + this(hgRepo, hgRepo.createWorkingDirWalker()); + } + + WorkingCopyStatusCollector(HgRepository hgRepo, FileWalker hgRepoWalker) { + this.repo = hgRepo; + this.repoWalker = hgRepoWalker; + } + + /** + * Optionally, supply a collector instance that may cache (or have already cached) base revision + * @param sc may be null + */ + public void setBaseRevisionCollector(StatusCollector sc) { + baseRevisionCollector = sc; + } + + private HgDirstate getDirstate() { + if (dirstate == null) { + dirstate = repo.loadDirstate(); + } + return dirstate; + } + + // may be invoked few times + public void walk(int baseRevision, StatusCollector.Inspector inspector) { + final HgIgnore hgIgnore = repo.loadIgnore(); + TreeSet knownEntries = getDirstate().all(); + final boolean isTipBase; + if (baseRevision == TIP) { + baseRevision = repo.getManifest().getRevisionCount() - 1; + isTipBase = true; + } else { + isTipBase = baseRevision == repo.getManifest().getRevisionCount() - 1; + } + StatusCollector.ManifestRevisionInspector collect = null; + Set baseRevFiles = Collections.emptySet(); + if (!isTipBase) { + if (baseRevisionCollector != null) { + collect = baseRevisionCollector.raw(baseRevision); + } else { + collect = new StatusCollector.ManifestRevisionInspector(baseRevision, baseRevision); + repo.getManifest().walk(baseRevision, baseRevision, collect); + } + baseRevFiles = new TreeSet(collect.files(baseRevision)); + } + if (inspector instanceof StatusCollector.Record) { + StatusCollector sc = baseRevisionCollector == null ? 
new StatusCollector(repo) : baseRevisionCollector; + ((StatusCollector.Record) inspector).init(baseRevision, BAD_REVISION, sc); + } + repoWalker.reset(); + while (repoWalker.hasNext()) { + repoWalker.next(); + String fname = repoWalker.name(); + File f = repoWalker.file(); + if (hgIgnore.isIgnored(fname)) { + inspector.ignored(fname); + } else if (knownEntries.remove(fname)) { + // modified, added, removed, clean + if (collect != null) { // need to check against base revision, not FS file + Nodeid nid1 = collect.nodeid(baseRevision, fname); + String flags = collect.flags(baseRevision, fname); + checkLocalStatusAgainstBaseRevision(baseRevFiles, nid1, flags, fname, f, inspector); + baseRevFiles.remove(fname); + } else { + checkLocalStatusAgainstFile(fname, f, inspector); + } + } else { + inspector.unknown(fname); + } + } + if (collect != null) { + for (String r : baseRevFiles) { + inspector.removed(r); + } + } + for (String m : knownEntries) { + // missing known file from a working dir + if (getDirstate().checkRemoved(m) == null) { + // not removed from the repository = 'deleted' + inspector.missing(m); + } else { + // removed from the repo + inspector.removed(m); + } + } + } + + public StatusCollector.Record status(int baseRevision) { + StatusCollector.Record rv = new StatusCollector.Record(); + walk(baseRevision, rv); + return rv; + } + + //******************************************** + + + private void checkLocalStatusAgainstFile(String fname, File f, StatusCollector.Inspector inspector) { + HgDirstate.Record r; + if ((r = getDirstate().checkNormal(fname)) != null) { + // either clean or modified + if (f.lastModified() / 1000 == r.time && r.size == f.length()) { + inspector.clean(fname); + } else { + // FIXME check actual content to avoid false modified files + inspector.modified(fname); + } + } else if ((r = getDirstate().checkAdded(fname)) != null) { + if (r.name2 == null) { + inspector.added(fname); + } else { + inspector.copied(r.name2, fname); + } + } else if ((r = getDirstate().checkRemoved(fname)) != null) { + inspector.removed(fname); + } else if ((r = getDirstate().checkMerged(fname)) != null) { + inspector.modified(fname); + } + } + + // XXX refactor checkLocalStatus methods in more OO way + private void checkLocalStatusAgainstBaseRevision(Set baseRevNames, Nodeid nid1, String flags, String fname, File f, StatusCollector.Inspector inspector) { + // fname is in the dirstate, either Normal, Added, Removed or Merged + HgDirstate.Record r; + if (nid1 == null) { + // normal: added? + // added: not known at the time of baseRevision, shall report + // merged: was not known, report as added? 
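+			// Summary of the branch below (file is on disk and in dirstate, but absent from baseRevision):
+			//   dirstate 'added' with a copy source that exists in baseRevision -> copied(origin, fname)
+			//   dirstate 'removed' -> nothing to report, the file did not exist at baseRevision anyway
+			//   anything else (plain added, normal, merged) -> added(fname)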
+ if ((r = getDirstate().checkAdded(fname)) != null) { + if (r.name2 != null && baseRevNames.contains(r.name2)) { + baseRevNames.remove(r.name2); + inspector.copied(r.name2, fname); + return; + } + // fall-through, report as added + } else if (getDirstate().checkRemoved(fname) != null) { + // removed: removed file was not known at the time of baseRevision, and we should not report it as removed + return; + } + inspector.added(fname); + } else { + // was known; check whether clean or modified + // when added - seems to be the case of a file added once again, hence need to check if content is different + if ((r = getDirstate().checkNormal(fname)) != null || (r = getDirstate().checkMerged(fname)) != null || (r = getDirstate().checkAdded(fname)) != null) { + // either clean or modified + HgDataFile fileNode = repo.getFileNode(fname); + final int lengthAtRevision = fileNode.length(nid1); + if (r.size /* XXX File.length() ?! */ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) { + inspector.modified(fname); + } else { + // check actual content to see actual changes + // XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison + if (areTheSame(f, fileNode.content(nid1))) { + inspector.clean(fname); + } else { + inspector.modified(fname); + } + } + } + // only those left in idsMap after processing are reported as removed + } + + // TODO think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest + // we don't need to tell exact difference, hash should be enough to detect difference, and it doesn't involve reading historical file content, and it's relatively + // cheap to calc hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the next approach is used for nodeids: + // changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest + // then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids). + // The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean' + } + + private static String todoGenerateFlags(String fname) { + // FIXME implement + return null; + } + private static boolean areTheSame(File f, byte[] data) { + try { + BufferedInputStream is = new BufferedInputStream(new FileInputStream(f)); + int i = 0; + while (i < data.length && data[i] == is.read()) { + i++; // increment only for successful match, otherwise won't tell last byte in data was the same as read from the stream + } + return i == data.length && is.read() == -1; // although data length is expected to be the same (see caller), check that we reached EOF, no more data left. 
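+			// Regarding the nodeid-based shortcut contemplated in the TODO above: if I read the revlog docs
+			// right, a file revision nodeid is sha1(min(p1,p2) + max(p1,p2) + content), p1/p2 being the parent
+			// nodeids (and content possibly carrying copy metadata), so it could be recomputed for the local
+			// file with a plain java.security.MessageDigest and compared against the manifest entry without
+			// reading historical content.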
+ } catch (IOException ex) { + ex.printStackTrace(); // log warn + } + return false; + } + +} diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/repo/package.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/package.html Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,5 @@ + + +Low-level API operations + + \ No newline at end of file diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/util/FileWalker.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/util/FileWalker.java Mon Jan 24 03:14:45 2011 +0100 @@ -0,0 +1,106 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@svnkit.com + */ +package org.tmatesoft.hg.util; + +import java.io.File; +import java.util.LinkedList; +import java.util.NoSuchElementException; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class FileWalker { + + private final File startDir; + private final LinkedList dirQueue; + private final LinkedList fileQueue; + private File nextFile; + private String nextPath; + + // FilenameFilter is used in a non-standard way - first argument, dir, is always startDir, + // while second arg, name, is startDir-relative path to the file in question + public FileWalker(File startDir) { + this.startDir = startDir; + dirQueue = new LinkedList(); + fileQueue = new LinkedList(); + reset(); + } + + public void reset() { + fileQueue.clear(); + dirQueue.clear(); + dirQueue.add(startDir); + nextFile = null; + nextPath = null; + } + + public boolean hasNext() { + return fill(); + } + + public void next() { + if (!fill()) { + throw new NoSuchElementException(); + } + nextFile = fileQueue.removeFirst(); + nextPath = path(nextFile); + } + + public String name() { + return nextPath; + } + + public File file() { + return nextFile; + } + + private String path(File f) { + // XXX LocalHgRepo#normalize + String p = f.getPath().substring(startDir.getPath().length() + 1); + return p.replace('\\', '/').replace("//", "/"); + } + + private File[] listFiles(File f) { + // in case we need to solve os-related file issues (mac with some encodings?) + return f.listFiles(); + } + + // return true when fill added any elements to fileQueue. 
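+	// (more precisely: returns true as long as fileQueue has entries to hand out, refilling it from
+	// dirQueue on demand - it may return true without having added anything on this particular call)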
+	private boolean fill() {
+		while (fileQueue.isEmpty()) {
+			if (dirQueue.isEmpty()) {
+				return false;
+			}
+			while (!dirQueue.isEmpty()) {
+				File dir = dirQueue.removeFirst();
+				for (File f : listFiles(dir)) {
+					if (f.isDirectory()) {
+						if (!".hg".equals(f.getName())) {
+							dirQueue.addLast(f);
+						}
+					} else {
+						fileQueue.addLast(f);
+					}
+				}
+				break;
+			}
+		}
+		return !fileQueue.isEmpty();
+	}
+}
diff -r 0d279bcc4442 -r 6f1b88693d48 src/org/tmatesoft/hg/util/PathRewrite.java
--- a/src/org/tmatesoft/hg/util/PathRewrite.java	Sun Jan 23 04:06:18 2011 +0100
+++ b/src/org/tmatesoft/hg/util/PathRewrite.java	Mon Jan 24 03:14:45 2011 +0100
@@ -16,6 +16,9 @@
  */
 package org.tmatesoft.hg.util;
 
+import java.util.LinkedList;
+import java.util.List;
+
 /**
  *
  * @author Artem Tikhomirov
@@ -24,4 +27,27 @@
 public interface PathRewrite {
 
 	public String rewrite(String path);
+
+	public class Composite implements PathRewrite {
+		private List<PathRewrite> chain;
+
+		public Composite(PathRewrite... e) {
+			LinkedList<PathRewrite> r = new LinkedList<PathRewrite>();
+			for (int i = (e == null ? -1 : e.length - 1); i >= 0; i--) {
+				r.addFirst(e[i]);
+			}
+			chain = r;
+		}
+		public Composite chain(PathRewrite e) {
+			chain.add(e);
+			return this;
+		}
+
+		public String rewrite(String path) {
+			for (PathRewrite pr : chain) {
+				path = pr.rewrite(path);
+			}
+			return path;
+		}
+	}
 }
diff -r 0d279bcc4442 -r 6f1b88693d48 test/org/tmatesoft/hg/test/LogOutputParser.java
--- a/test/org/tmatesoft/hg/test/LogOutputParser.java	Sun Jan 23 04:06:18 2011 +0100
+++ b/test/org/tmatesoft/hg/test/LogOutputParser.java	Mon Jan 24 03:14:45 2011 +0100
@@ -21,7 +21,8 @@
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import com.tmate.hgkit.ll.HgRepository;
+import org.tmatesoft.hg.repo.HgRepository;
+
 
 /**
  *
diff -r 0d279bcc4442 -r 6f1b88693d48 test/org/tmatesoft/hg/test/ManifestOutputParser.java
--- a/test/org/tmatesoft/hg/test/ManifestOutputParser.java	Sun Jan 23 04:06:18 2011 +0100
+++ b/test/org/tmatesoft/hg/test/ManifestOutputParser.java	Mon Jan 24 03:14:45 2011 +0100
@@ -21,9 +21,9 @@
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.core.Path;
 
-import com.tmate.hgkit.ll.Nodeid;
 
 /**
  *
diff -r 0d279bcc4442 -r 6f1b88693d48 test/org/tmatesoft/hg/test/TestHistory.java
--- a/test/org/tmatesoft/hg/test/TestHistory.java	Sun Jan 23 04:06:18 2011 +0100
+++ b/test/org/tmatesoft/hg/test/TestHistory.java	Mon Jan 24 03:14:45 2011 +0100
@@ -22,10 +22,10 @@
 
 import org.tmatesoft.hg.core.Cset;
 import org.tmatesoft.hg.core.LogCommand;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.Lookup;
 import org.tmatesoft.hg.test.LogOutputParser.Record;
 
-import com.tmate.hgkit.fs.RepositoryLookup;
-import com.tmate.hgkit.ll.HgRepository;
 
 /**
  *
@@ -39,7 +39,7 @@
 	private LogOutputParser changelogParser;
 	
 	public static void main(String[] args) throws Exception {
-		TestHistory th = new TestHistory(new RepositoryLookup().detectFromWorkingDir());
+		TestHistory th = new TestHistory(new Lookup().detectFromWorkingDir());
 		th.testCompleteLog();
 	}
 
diff -r 0d279bcc4442 -r 6f1b88693d48 test/org/tmatesoft/hg/test/TestManifest.java
--- a/test/org/tmatesoft/hg/test/TestManifest.java	Sun Jan 23 04:06:18 2011 +0100
+++ b/test/org/tmatesoft/hg/test/TestManifest.java	Mon Jan 24 03:14:45 2011 +0100
@@ -16,19 +16,19 @@
  */
 package org.tmatesoft.hg.test;
 
-import static com.tmate.hgkit.ll.HgRepository.TIP;
+import static org.tmatesoft.hg.repo.HgRepository.TIP;
 
 import java.util.LinkedHashMap;
 import java.util.LinkedList;
 import
java.util.Map; import org.tmatesoft.hg.core.LogCommand.FileRevision; +import org.tmatesoft.hg.core.Nodeid; import org.tmatesoft.hg.core.Path; import org.tmatesoft.hg.core.RepositoryTreeWalker; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.repo.Lookup; -import com.tmate.hgkit.fs.RepositoryLookup; -import com.tmate.hgkit.ll.HgRepository; -import com.tmate.hgkit.ll.Nodeid; /** * @@ -53,7 +53,7 @@ }; public static void main(String[] args) throws Exception { - HgRepository repo = new RepositoryLookup().detectFromWorkingDir(); + HgRepository repo = new Lookup().detectFromWorkingDir(); TestManifest tm = new TestManifest(repo); tm.testTip(); tm.testFirstRevision(); diff -r 0d279bcc4442 -r 6f1b88693d48 test/org/tmatesoft/hg/test/TestStatus.java --- a/test/org/tmatesoft/hg/test/TestStatus.java Sun Jan 23 04:06:18 2011 +0100 +++ b/test/org/tmatesoft/hg/test/TestStatus.java Mon Jan 24 03:14:45 2011 +0100 @@ -16,20 +16,18 @@ */ package org.tmatesoft.hg.test; -import static com.tmate.hgkit.ll.HgRepository.TIP; +import static org.tmatesoft.hg.repo.HgRepository.TIP; -import java.io.File; import java.util.Collection; import java.util.LinkedList; import java.util.List; import org.tmatesoft.hg.core.StatusCommand; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.repo.Lookup; +import org.tmatesoft.hg.repo.StatusCollector; +import org.tmatesoft.hg.repo.WorkingCopyStatusCollector; -import com.tmate.hgkit.fs.FileWalker; -import com.tmate.hgkit.fs.RepositoryLookup; -import com.tmate.hgkit.ll.HgRepository; -import com.tmate.hgkit.ll.StatusCollector; -import com.tmate.hgkit.ll.WorkingCopyStatusCollector; /** * @@ -43,7 +41,7 @@ private ExecHelper eh; public static void main(String[] args) throws Exception { - HgRepository repo = new RepositoryLookup().detectFromWorkingDir(); + HgRepository repo = new Lookup().detectFromWorkingDir(); TestStatus test = new TestStatus(repo); test.testLowLevel(); test.testStatusCommand(); @@ -56,7 +54,7 @@ } public void testLowLevel() throws Exception { - final WorkingCopyStatusCollector wcc = new WorkingCopyStatusCollector(repo, new FileWalker(new File(System.getProperty("user.dir")))); + final WorkingCopyStatusCollector wcc = new WorkingCopyStatusCollector(repo); statusParser.reset(); eh.run("hg", "status", "-A"); StatusCollector.Record r = wcc.status(HgRepository.TIP);