Mercurial > jhg — comparison view of src/org/tmatesoft/hg/internal/BundleGenerator.java at revision 644:1deea2f33218
Commit message: Push: phase1 - prepare bundle with changes
Author: Artem Tikhomirov <tikhomirov.artem@gmail.com>
Date: Wed, 19 Jun 2013 16:04:24 +0200
Parents: (none listed)
Children: 14dac192aa26
Comparison legend: equal / deleted / inserted / replaced
Comparing 643:a8ce405da1f5 with 644:1deea2f33218
/*
 * Copyright (c) 2013 TMate Software Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * For information on how to redistribute this software under
 * the terms of a license other than GNU General Public License
 * contact TMate Software at support@hg4j.com
 */
17 package org.tmatesoft.hg.internal; | |
18 | |
19 import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION; | |
20 import static org.tmatesoft.hg.repo.HgRepository.TIP; | |
21 | |
22 import java.io.File; | |
23 import java.io.FileOutputStream; | |
24 import java.io.IOException; | |
25 import java.io.OutputStream; | |
26 import java.util.ArrayList; | |
27 import java.util.Arrays; | |
28 import java.util.Collection; | |
29 import java.util.Collections; | |
30 import java.util.Comparator; | |
31 import java.util.List; | |
32 | |
33 import org.tmatesoft.hg.console.Bundle; | |
34 import org.tmatesoft.hg.core.HgIOException; | |
35 import org.tmatesoft.hg.core.Nodeid; | |
36 import org.tmatesoft.hg.internal.Patch.PatchDataSource; | |
37 import org.tmatesoft.hg.repo.HgBundle; | |
38 import org.tmatesoft.hg.repo.HgChangelog; | |
39 import org.tmatesoft.hg.repo.HgChangelog.RawChangeset; | |
40 import org.tmatesoft.hg.repo.HgDataFile; | |
41 import org.tmatesoft.hg.repo.HgInternals; | |
42 import org.tmatesoft.hg.repo.HgInvalidControlFileException; | |
43 import org.tmatesoft.hg.repo.HgLookup; | |
44 import org.tmatesoft.hg.repo.HgManifest; | |
45 import org.tmatesoft.hg.repo.HgRepository; | |
46 import org.tmatesoft.hg.repo.HgRuntimeException; | |
47 | |
48 /** | |
49 * @see http://mercurial.selenic.com/wiki/BundleFormat | |
50 * @author Artem Tikhomirov | |
51 * @author TMate Software Ltd. | |
52 */ | |
53 public class BundleGenerator { | |
54 | |
55 private final Internals repo; | |
56 | |
57 public BundleGenerator(Internals hgRepo) { | |
58 repo = hgRepo; | |
59 } | |
60 | |
61 public File create(List<Nodeid> changesets) throws HgIOException, IOException { | |
62 final HgChangelog clog = repo.getRepo().getChangelog(); | |
63 final HgManifest manifest = repo.getRepo().getManifest(); | |
64 IntVector clogRevsVector = new IntVector(changesets.size(), 0); | |
65 for (Nodeid n : changesets) { | |
66 clogRevsVector.add(clog.getRevisionIndex(n)); | |
67 } | |
68 clogRevsVector.sort(true); | |
69 final int[] clogRevs = clogRevsVector.toArray(); | |
70 System.out.printf("Changelog: %s\n", Arrays.toString(clogRevs)); | |
71 final IntMap<Nodeid> clogMap = new IntMap<Nodeid>(changesets.size()); | |
72 final IntVector manifestRevs = new IntVector(changesets.size(), 0); | |
73 final List<HgDataFile> files = new ArrayList<HgDataFile>(); | |
74 clog.range(new HgChangelog.Inspector() { | |
75 public void next(int revisionIndex, Nodeid nodeid, RawChangeset cset) throws HgRuntimeException { | |
76 clogMap.put(revisionIndex, nodeid); | |
77 manifestRevs.add(manifest.getRevisionIndex(cset.manifest())); | |
78 for (String f : cset.files()) { | |
79 HgDataFile df = repo.getRepo().getFileNode(f); | |
80 if (!files.contains(df)) { | |
81 files.add(df); | |
82 } | |
83 } | |
84 } | |
85 }, clogRevs); | |
86 manifestRevs.sort(true); | |
87 System.out.printf("Manifest: %s\n", Arrays.toString(manifestRevs.toArray(true))); | |
88 /////////////// | |
89 for (HgDataFile df : sortedByName(files)) { | |
90 RevlogStream s = repo.getImplAccess().getStream(df); | |
91 final IntVector fileRevs = new IntVector(); | |
92 s.iterate(0, TIP, false, new RevlogStream.Inspector() { | |
93 | |
94 public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgRuntimeException { | |
95 if (Arrays.binarySearch(clogRevs, linkRevision) >= 0) { | |
96 fileRevs.add(revisionIndex); | |
97 } | |
98 } | |
99 }); | |
100 fileRevs.sort(true); | |
101 System.out.printf("%s: %s\n", df.getPath(), Arrays.toString(fileRevs.toArray(true))); | |
102 } | |
103 if (Boolean.FALSE.booleanValue()) { | |
104 return null; | |
105 } | |
106 /////////////// | |
107 // | |
108 final File bundleFile = File.createTempFile("hg4j-", "bundle"); | |
109 final OutputStreamSerializer outRaw = new OutputStreamSerializer(new FileOutputStream(bundleFile)); | |
110 outRaw.write("HG10UN".getBytes(), 0, 6); | |
111 // | |
112 RevlogStream clogStream = repo.getImplAccess().getChangelogStream(); | |
113 new ChunkGenerator(outRaw, clogMap).iterate(clogStream, clogRevs); | |
114 outRaw.writeInt(0); // null chunk for changelog group | |
115 // | |
116 RevlogStream manifestStream = repo.getImplAccess().getManifestStream(); | |
117 new ChunkGenerator(outRaw, clogMap).iterate(manifestStream, manifestRevs.toArray(true)); | |
118 outRaw.writeInt(0); // null chunk for manifest group | |
119 // | |
120 for (HgDataFile df : sortedByName(files)) { | |
121 RevlogStream s = repo.getImplAccess().getStream(df); | |
122 final IntVector fileRevs = new IntVector(); | |
123 s.iterate(0, TIP, false, new RevlogStream.Inspector() { | |
124 | |
125 public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgRuntimeException { | |
126 if (Arrays.binarySearch(clogRevs, linkRevision) >= 0) { | |
127 fileRevs.add(revisionIndex); | |
128 } | |
129 } | |
130 }); | |
131 fileRevs.sort(true); | |
132 if (!fileRevs.isEmpty()) { | |
133 // although BundleFormat page says "filename length, filename" for a file, | |
134 // in fact there's a sort of 'filename chunk', i.e. filename length field includes | |
135 // not only length of filename, but also length of the field itseld, i.e. filename.length+sizeof(int) | |
136 byte[] fnameBytes = df.getPath().toString().getBytes(); // FIXME check encoding in native hg (and fix accordingly in HgBundle) | |
137 outRaw.writeInt(fnameBytes.length + 4); | |
138 outRaw.writeByte(fnameBytes); | |
139 new ChunkGenerator(outRaw, clogMap).iterate(s, fileRevs.toArray(true)); | |
140 outRaw.writeInt(0); // null chunk for file group | |
141 } | |
142 } | |
143 outRaw.done(); | |
144 //return new HgBundle(repo.getSessionContext(), repo.getDataAccess(), bundleFile); | |
145 return bundleFile; | |
146 } | |
147 | |
148 private static Collection<HgDataFile> sortedByName(List<HgDataFile> files) { | |
149 Collections.sort(files, new Comparator<HgDataFile>() { | |
150 | |
151 public int compare(HgDataFile o1, HgDataFile o2) { | |
152 return o1.getPath().compareTo(o2.getPath()); | |
153 } | |
154 }); | |
155 return files; | |
156 } | |
157 | |
158 | |
159 public static void main(String[] args) throws Exception { | |
160 final HgLookup hgLookup = new HgLookup(); | |
161 HgRepository hgRepo = hgLookup.detectFromWorkingDir(); | |
162 BundleGenerator bg = new BundleGenerator(HgInternals.getImplementationRepo(hgRepo)); | |
163 ArrayList<Nodeid> l = new ArrayList<Nodeid>(); | |
164 l.add(Nodeid.fromAscii("9ef1fab9f5e3d51d70941121dc27410e28069c2d")); // 640 | |
165 l.add(Nodeid.fromAscii("2f33f102a8fa59274a27ebbe1c2903cecac6c5d5")); // 639 | |
166 l.add(Nodeid.fromAscii("d074971287478f69ab0a64176ce2284d8c1e91c3")); // 638 | |
167 File bundleFile = bg.create(l); | |
168 HgBundle b = hgLookup.loadBundle(bundleFile); | |
169 // Bundle.dump(b); // FIXME dependency from dependant code | |
170 } | |
171 | |
172 private static class ChunkGenerator implements RevlogStream.Inspector { | |
173 | |
174 private final DataSerializer ds; | |
175 private final IntMap<Nodeid> parentMap; | |
176 private final IntMap<Nodeid> clogMap; | |
177 private byte[] prevContent; | |
178 private int startParent; | |
179 | |
180 public ChunkGenerator(DataSerializer dataSerializer, IntMap<Nodeid> clogNodeidMap) { | |
181 ds = dataSerializer; | |
182 parentMap = new IntMap<Nodeid>(clogNodeidMap.size());; | |
183 clogMap = clogNodeidMap; | |
184 } | |
185 | |
186 public void iterate(RevlogStream s, int[] revisions) throws HgRuntimeException { | |
187 int[] p = s.parents(revisions[0], new int[2]); | |
188 startParent = p[0]; | |
189 int[] revs2read; | |
190 if (startParent == NO_REVISION) { | |
191 revs2read = revisions; | |
192 prevContent = new byte[0]; | |
193 } else { | |
194 revs2read = new int[revisions.length + 1]; | |
195 revs2read[0] = startParent; | |
196 System.arraycopy(revisions, 0, revs2read, 1, revisions.length); | |
197 } | |
198 s.iterate(revs2read, true, this); | |
199 } | |
200 | |
201 public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgRuntimeException { | |
202 try { | |
203 parentMap.put(revisionIndex, Nodeid.fromBinary(nodeid, 0)); | |
204 byte[] nextContent = data.byteArray(); | |
205 data.done(); | |
206 if (revisionIndex == startParent) { | |
207 prevContent = nextContent; | |
208 return; | |
209 } | |
210 Patch p = GeneratePatchInspector.delta(prevContent, nextContent); | |
211 prevContent = nextContent; | |
212 nextContent = null; | |
213 PatchDataSource pds = p.new PatchDataSource(); | |
214 int len = pds.serializeLength() + 84; | |
215 ds.writeInt(len); | |
216 ds.write(nodeid, 0, Nodeid.SIZE); | |
217 // TODO assert parents match those in previous group elements | |
218 if (parent1Revision != NO_REVISION) { | |
219 ds.writeByte(parentMap.get(parent1Revision).toByteArray()); | |
220 } else { | |
221 ds.writeByte(Nodeid.NULL.toByteArray()); | |
222 } | |
223 if (parent2Revision != NO_REVISION) { | |
224 ds.writeByte(parentMap.get(parent2Revision).toByteArray()); | |
225 } else { | |
226 ds.writeByte(Nodeid.NULL.toByteArray()); | |
227 } | |
228 ds.writeByte(clogMap.get(linkRevision).toByteArray()); | |
229 pds.serialize(ds); | |
230 } catch (IOException ex) { | |
231 // XXX odd to have object with IOException to use where no checked exception is allowed | |
232 throw new HgInvalidControlFileException(ex.getMessage(), ex, null); | |
233 } catch (HgIOException ex) { | |
234 throw new HgInvalidControlFileException(ex, true); // XXX any way to refactor ChunkGenerator not to get checked exception here? | |
235 } | |
236 } | |
237 } | |
238 | |
239 private static class OutputStreamSerializer extends DataSerializer { | |
240 private final OutputStream out; | |
241 public OutputStreamSerializer(OutputStream outputStream) { | |
242 out = outputStream; | |
243 } | |
244 | |
245 @Override | |
246 public void write(byte[] data, int offset, int length) throws HgIOException { | |
247 try { | |
248 out.write(data, offset, length); | |
249 } catch (IOException ex) { | |
250 throw new HgIOException(ex.getMessage(), ex, null); | |
251 } | |
252 } | |
253 | |
254 @Override | |
255 public void done() throws HgIOException { | |
256 try { | |
257 out.close(); | |
258 super.done(); | |
259 } catch (IOException ex) { | |
260 throw new HgIOException(ex.getMessage(), ex, null); | |
261 } | |
262 } | |
263 } | |
264 } |