Mercurial > jhg
comparison src/org/tmatesoft/hg/internal/BundleGenerator.java @ 651:6e98d34eaca8
Push: tests (push to empty, push changes, respect secret)
| author | Artem Tikhomirov <tikhomirov.artem@gmail.com> |
|---|---|
| date | Mon, 01 Jul 2013 21:19:53 +0200 |
| parents | 14dac192aa26 |
| children | fba85bc1dfb8 |
comparison legend: equal | deleted | inserted | replaced
| 650:3b275cc2d2aa | 651:6e98d34eaca8 |
|---|---|
| 66 for (Nodeid n : changesets) { | 66 for (Nodeid n : changesets) { |
| 67 clogRevsVector.add(clog.getRevisionIndex(n)); | 67 clogRevsVector.add(clog.getRevisionIndex(n)); |
| 68 } | 68 } |
| 69 clogRevsVector.sort(true); | 69 clogRevsVector.sort(true); |
| 70 final int[] clogRevs = clogRevsVector.toArray(); | 70 final int[] clogRevs = clogRevsVector.toArray(); |
| 71 System.out.printf("Changelog: %s\n", Arrays.toString(clogRevs)); | |
| 72 final IntMap<Nodeid> clogMap = new IntMap<Nodeid>(changesets.size()); | 71 final IntMap<Nodeid> clogMap = new IntMap<Nodeid>(changesets.size()); |
| 73 final IntVector manifestRevs = new IntVector(changesets.size(), 0); | 72 final IntVector manifestRevs = new IntVector(changesets.size(), 0); |
| 74 final List<HgDataFile> files = new ArrayList<HgDataFile>(); | 73 final List<HgDataFile> files = new ArrayList<HgDataFile>(); |
| 75 clog.range(new HgChangelog.Inspector() { | 74 clog.range(new HgChangelog.Inspector() { |
| 76 private Set<String> seenFiles = new HashSet<String>(); | 75 private Set<String> seenFiles = new HashSet<String>(); |
| 86 files.add(df); | 85 files.add(df); |
| 87 } | 86 } |
| 88 } | 87 } |
| 89 }, clogRevs); | 88 }, clogRevs); |
| 90 manifestRevs.sort(true); | 89 manifestRevs.sort(true); |
| 91 System.out.printf("Manifest: %s\n", Arrays.toString(manifestRevs.toArray(true))); | 90 // |
| 92 /////////////// | 91 final File bundleFile = File.createTempFile("hg4j-", ".bundle"); |
| 93 for (HgDataFile df : sortedByName(files)) { | |
| 94 RevlogStream s = repo.getImplAccess().getStream(df); | |
| 95 final IntVector fileRevs = new IntVector(); | |
| 96 s.iterate(0, TIP, false, new RevlogStream.Inspector() { | |
| 97 | |
| 98 public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgRuntimeException { | |
| 99 if (Arrays.binarySearch(clogRevs, linkRevision) >= 0) { | |
| 100 fileRevs.add(revisionIndex); | |
| 101 } | |
| 102 } | |
| 103 }); | |
| 104 fileRevs.sort(true); | |
| 105 System.out.printf("%s: %s\n", df.getPath(), Arrays.toString(fileRevs.toArray(true))); | |
| 106 } | |
| 107 if (Boolean.FALSE.booleanValue()) { | |
| 108 return null; | |
| 109 } | |
| 110 /////////////// | |
| 111 // | |
| 112 final File bundleFile = File.createTempFile("hg4j-", "bundle"); | |
| 113 final FileOutputStream osBundle = new FileOutputStream(bundleFile); | 92 final FileOutputStream osBundle = new FileOutputStream(bundleFile); |
| 114 final OutputStreamSerializer outRaw = new OutputStreamSerializer(osBundle); | 93 final OutputStreamSerializer outRaw = new OutputStreamSerializer(osBundle); |
| 115 outRaw.write("HG10UN".getBytes(), 0, 6); | 94 outRaw.write("HG10UN".getBytes(), 0, 6); |
| 116 // | 95 // |
| 117 RevlogStream clogStream = repo.getImplAccess().getChangelogStream(); | 96 RevlogStream clogStream = repo.getImplAccess().getChangelogStream(); |
| 185 private byte[] prevContent; | 164 private byte[] prevContent; |
| 186 private int startParent; | 165 private int startParent; |
| 187 | 166 |
| 188 public ChunkGenerator(DataSerializer dataSerializer, IntMap<Nodeid> clogNodeidMap) { | 167 public ChunkGenerator(DataSerializer dataSerializer, IntMap<Nodeid> clogNodeidMap) { |
| 189 ds = dataSerializer; | 168 ds = dataSerializer; |
| 190 parentMap = new IntMap<Nodeid>(clogNodeidMap.size());; | 169 parentMap = new IntMap<Nodeid>(clogNodeidMap.size()); |
| 191 clogMap = clogNodeidMap; | 170 clogMap = clogNodeidMap; |
| 192 } | 171 } |
| 193 | 172 |
| 194 public void iterate(RevlogStream s, int[] revisions) throws HgRuntimeException { | 173 public void iterate(RevlogStream s, int[] revisions) throws HgRuntimeException { |
| 195 int[] p = s.parents(revisions[0], new int[2]); | 174 int[] p = s.parents(revisions[0], new int[2]); |
| 201 } else { | 180 } else { |
| 202 revs2read = new int[revisions.length + 1]; | 181 revs2read = new int[revisions.length + 1]; |
| 203 revs2read[0] = startParent; | 182 revs2read[0] = startParent; |
| 204 System.arraycopy(revisions, 0, revs2read, 1, revisions.length); | 183 System.arraycopy(revisions, 0, revs2read, 1, revisions.length); |
| 205 } | 184 } |
| 185 // FIXME this is a hack to fill parentsMap with | |
| 186 // parents of elements that we are not going to meet with regular | |
| 187 // iteration, e.g. changes from a different branch (with some older parent), | |
| 188 // scenario: two revisions added to two different branches | |
| 189 // revisions[10, 11], parents(10) == 9, parents(11) == 7 | |
| 190 // revs2read == [9,10,11], and parentsMap lacks entry for parent rev7. | |
| 191 fillMissingParentsMap(s, revisions); | |
| 206 s.iterate(revs2read, true, this); | 192 s.iterate(revs2read, true, this); |
| 193 } | |
| 194 | |
| 195 private void fillMissingParentsMap(RevlogStream s, int[] revisions) throws HgRuntimeException { | |
| 196 int[] p = new int[2]; | |
| 197 for (int i = 1; i < revisions.length; i++) { | |
| 198 s.parents(revisions[i], p); | |
| 199 if (p[0] != NO_REVISION && Arrays.binarySearch(revisions, p[0]) < 0) { | |
| 200 parentMap.put(p[0], Nodeid.fromBinary(s.nodeid(p[0]), 0)); | |
| 201 } | |
| 202 if (p[1] != NO_REVISION && Arrays.binarySearch(revisions, p[1]) < 0) { | |
| 203 parentMap.put(p[1], Nodeid.fromBinary(s.nodeid(p[1]), 0)); | |
| 204 } | |
| 205 } | |
| 207 } | 206 } |
| 208 | 207 |
| 209 public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgRuntimeException { | 208 public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgRuntimeException { |
| 210 try { | 209 try { |
| 211 parentMap.put(revisionIndex, Nodeid.fromBinary(nodeid, 0)); | 210 parentMap.put(revisionIndex, Nodeid.fromBinary(nodeid, 0)); |
