comparison cmdline/org/tmatesoft/hg/console/Clone.java @ 170:71ddbf8603e8
Initial clone: populate a given directory from a bundle. Everything but remote server access is there, albeit in prototype code style
author | Artem Tikhomirov <tikhomirov.artem@gmail.com>
date | Wed, 23 Mar 2011 20:46:00 +0100
parents |
children | 2c3e96674e2a
comparing 169:8c8e3f372fa1 with 170:71ddbf8603e8
/*
 * Copyright (c) 2011 TMate Software Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * For information on how to redistribute this software under
 * the terms of a license other than GNU General Public License
 * contact TMate Software at support@hg4j.com
 */
package org.tmatesoft.hg.console;

import static org.tmatesoft.hg.core.Nodeid.NULL;
import static org.tmatesoft.hg.internal.RequiresFile.*;
import static org.tmatesoft.hg.internal.RequiresFile.DOTENCODE;
import static org.tmatesoft.hg.internal.RequiresFile.FNCACHE;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URL;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.TreeMap;
import java.util.zip.DeflaterOutputStream;

import org.tmatesoft.hg.core.HgBadStateException;
import org.tmatesoft.hg.core.HgRepoFacade;
import org.tmatesoft.hg.core.Nodeid;
import org.tmatesoft.hg.internal.ByteArrayDataAccess;
import org.tmatesoft.hg.internal.DataAccess;
import org.tmatesoft.hg.internal.DigestHelper;
import org.tmatesoft.hg.internal.Internals;
import org.tmatesoft.hg.internal.RequiresFile;
import org.tmatesoft.hg.internal.RevlogStream;
import org.tmatesoft.hg.repo.HgBundle;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRemoteRepository;
import org.tmatesoft.hg.repo.HgBundle.GroupElement;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.util.PathRewrite;

/**
 * WORK IN PROGRESS, DO NOT USE
 *
 * @author Artem Tikhomirov
 * @author TMate Software Ltd.
 */
public class Clone {
	/*
	 * Changegroup:
	 * http://mercurial.selenic.com/wiki/Merge
	 * http://mercurial.selenic.com/wiki/WireProtocol
	 *
	 * according to the latter, bundleformat data is sent through zlib
	 * (there's no header like HG10?? with the server output, though,
	 * as one may expect according to http://mercurial.selenic.com/wiki/BundleFormat)
	 */
	public static void main(String[] args) throws Exception {
		Options cmdLineOpts = Options.parse(args);
		HgRepoFacade hgRepo = new HgRepoFacade();
		if (!hgRepo.init(cmdLineOpts.findRepository())) {
			System.err.printf("Can't find repository in: %s\n", hgRepo.getRepository().getLocation());
			return;
		}
		File destDir = new File("/temp/hg/clone-01/");
		if (destDir.exists()) {
			if (!destDir.isDirectory()) {
				throw new IllegalArgumentException();
			} else if (destDir.list().length > 0) {
				throw new IllegalArgumentException();
			}
		} else {
			destDir.mkdirs();
		}
		// if cloning remote repo, which can stream and no revision is specified -
		// can use 'stream_out' wireproto
		//
		// //////// 1. from Remote.java take code that asks changegroup from remote server and write it down to temp file
		// //////// 2. then, read the file with HgBundle
		// //////// 3. process changelog, memorize nodeids to index
		// //////// 4. process manifest, using map from step 3, collect manifest nodeids
		// //////// 5. process every file, using map from 3, and consult set from step 4 to ensure repo is correct
		// access source
		HgRemoteRepository remoteRepo = new HgRemoteRepository(); // new HgLookup().detect(new URL("https://asd/hg/"));
		// discover changes
		HgBundle completeChanges = remoteRepo.getChanges(Collections.singletonList(NULL));
		WriteDownMate mate = new WriteDownMate(destDir);
		// instantiate new repo in the destdir
		mate.initEmptyRepository();
		// pull changes
		completeChanges.inspectAll(mate);
		mate.complete();
		// completeChanges.unlink();
	}

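	// Receives the HgBundle content (changelog, then manifest, then each file's changegroup) and writes
	// the corresponding revlog index files into <destDir>/.hg/store, i.e. does the actual repository population.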
	private static class WriteDownMate implements HgBundle.Inspector {
		private final File hgDir;
		private FileOutputStream indexFile;
		private final PathRewrite storagePathHelper;

		private final TreeMap<Nodeid, Integer> changelogIndexes = new TreeMap<Nodeid, Integer>();
		private boolean collectChangelogIndexes = false;

		private int base = -1;
		private long offset = 0;
		private DataAccess prevRevContent;
		private final DigestHelper dh = new DigestHelper();
		private final ArrayList<Nodeid> revisionSequence = new ArrayList<Nodeid>(); // last visited nodes first

		private final LinkedList<String> fncacheFiles = new LinkedList<String>();

		public WriteDownMate(File destDir) {
			hgDir = new File(destDir, ".hg");
			Internals i = new Internals();
			i.setStorageConfig(1, STORE | FNCACHE | DOTENCODE);
			storagePathHelper = i.buildDataFilesHelper();
		}
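		// Note: with STORE | FNCACHE | DOTENCODE requested above, storagePathHelper is presumably meant to map
		// tracked file names to their encoded locations under .hg/store/data/ (the store's fncache/dotencode
		// name-mangling); that is an assumption about Internals.buildDataFilesHelper(), not verified here.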

		public void initEmptyRepository() throws IOException {
			hgDir.mkdir();
			FileOutputStream requiresFile = new FileOutputStream(new File(hgDir, "requires"));
			requiresFile.write("revlogv1\nstore\nfncache\ndotencode\n".getBytes());
			requiresFile.close();
			new File(hgDir, "store").mkdir(); // with that, hg verify says ok.
		}
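		// The entries listed in "requires" have to match the on-disk format this class actually produces:
		// revlogv1 index entries (see element() below), the .hg/store layout, the fncache file written in
		// complete(), and the dotencode-style name encoding provided by the path helper.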

		public void complete() throws IOException {
			FileOutputStream fncacheFile = new FileOutputStream(new File(hgDir, "store/fncache"));
			for (String s : fncacheFiles) {
				fncacheFile.write(s.getBytes());
				fncacheFile.write(0x0A); // http://mercurial.selenic.com/wiki/fncacheRepoFormat
			}
			fncacheFile.close();
		}

		public void changelogStart() {
			try {
				base = -1;
				offset = 0;
				revisionSequence.clear();
				indexFile = new FileOutputStream(new File(hgDir, "store/00changelog.i"));
				collectChangelogIndexes = true;
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
		}

		public void changelogEnd() {
			try {
				if (prevRevContent != null) {
					prevRevContent.done();
					prevRevContent = null;
				}
				collectChangelogIndexes = false;
				indexFile.close();
				indexFile = null;
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
		}

		public void manifestStart() {
			try {
				base = -1;
				offset = 0;
				revisionSequence.clear();
				indexFile = new FileOutputStream(new File(hgDir, "store/00manifest.i"));
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
		}

		public void manifestEnd() {
			try {
				if (prevRevContent != null) {
					prevRevContent.done();
					prevRevContent = null;
				}
				indexFile.close();
				indexFile = null;
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
		}

		public void fileStart(String name) {
			try {
				base = -1;
				offset = 0;
				revisionSequence.clear();
				fncacheFiles.add("data/" + name + ".i"); // FIXME this is pure guess,
				// need to investigate more how filenames are kept in fncache
				File file = new File(hgDir, storagePathHelper.rewrite(name));
				file.getParentFile().mkdirs();
				indexFile = new FileOutputStream(file);
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
		}

		public void fileEnd(String name) {
			try {
				if (prevRevContent != null) {
					prevRevContent.done();
					prevRevContent = null;
				}
				indexFile.close();
				indexFile = null;
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
		}

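		// Maps a parent nodeid to its index (local revision number) within the revlog being written;
		// the null nodeid maps to -1, which is how revlog index entries denote "no parent".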
		private int knownRevision(Nodeid p) {
			if (NULL.equals(p)) {
				return -1;
			} else {
				for (int i = revisionSequence.size() - 1; i >= 0; i--) {
					if (revisionSequence.get(i).equals(p)) {
						return i;
					}
				}
			}
			throw new HgBadStateException(String.format("Can't find index of %s", p.shortNotation()));
		}

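		// Called for every revision in the changegroup: reconstructs the full text by applying the element's
		// delta to the previously written revision, verifies the nodeid checksum (sha1 over the parents and
		// content), then appends an inline revlogv1 entry to the index file opened in the *Start() callback.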
		public boolean element(GroupElement ge) {
			try {
				assert indexFile != null;
				boolean writeComplete = false;
				Nodeid p1 = ge.firstParent();
				Nodeid p2 = ge.secondParent();
				if (NULL.equals(p1) && NULL.equals(p2) /* or forced flag, does REVIDX_PUNCHED_FLAG indicate that? */) {
					prevRevContent = new ByteArrayDataAccess(new byte[0]);
					writeComplete = true;
				}
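				// a revision with no parents can't be a delta against anything; its group element is
				// effectively a patch against the empty text, so it gets stored as a complete snapshot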
				byte[] content = ge.apply(prevRevContent);
				byte[] calculated = dh.sha1(p1, p2, content).asBinary();
				final Nodeid node = ge.node();
				if (!node.equalsTo(calculated)) {
					throw new HgBadStateException("Checksum failed");
				}
				final int link;
				if (collectChangelogIndexes) {
					changelogIndexes.put(node, revisionSequence.size());
					link = revisionSequence.size();
				} else {
					Integer csRev = changelogIndexes.get(ge.cset());
					if (csRev == null) {
						throw new HgBadStateException(String.format("Changelog doesn't contain revision %s", ge.cset().shortNotation()));
					}
					link = csRev.intValue();
				}
				final int p1Rev = knownRevision(p1), p2Rev = knownRevision(p2);
				DataAccess patchContent = ge.rawData();
				writeComplete = writeComplete || patchContent.length() >= (/* 3/4 of actual */ content.length - (content.length >>> 2));
				if (writeComplete) {
					base = revisionSequence.size();
				}
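				// 'base' is the revision index where this revision's delta chain starts: a full snapshot
				// points at itself, while delta revisions keep the base of the previous revision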
				final byte[] sourceData = writeComplete ? content : patchContent.byteArray();
				final byte[] data;
				ByteArrayOutputStream bos = new ByteArrayOutputStream(content.length);
				DeflaterOutputStream dos = new DeflaterOutputStream(bos);
				dos.write(sourceData);
				dos.close();
				final byte[] compressedData = bos.toByteArray();
				dos = null;
				bos = null;
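				// revlog data chunks are either zlib-deflated (they then start with 'x', 0x78) or, when
				// compression doesn't pay off, stored literally with a 'u' marker byte in front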
				final Byte dataPrefix;
				if (compressedData.length >= (sourceData.length - (sourceData.length >>> 2))) {
					// compression wasn't too effective,
					data = sourceData;
					dataPrefix = 'u';
				} else {
					data = compressedData;
					dataPrefix = null;
				}

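				// RevlogNG (revlogv1) index entry, 64 bytes:
				//   6-byte offset into data + 2-byte flags (for revision 0 this field instead carries the
				//   index format version and flags, here v1 with the inline-data bit set),
				//   4-byte compressed length, 4-byte uncompressed length, 4-byte base revision,
				//   4-byte link revision, 4-byte first parent, 4-byte second parent,
				//   20-byte nodeid padded with 12 zero bytes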
				ByteBuffer header = ByteBuffer.allocate(64 /* REVLOGV1_RECORD_SIZE */);
				if (offset == 0) {
					final int INLINEDATA = 1 << 16;
					header.putInt(1 /* RevlogNG */ | INLINEDATA);
					header.putInt(0);
				} else {
					header.putLong(offset << 16);
				}
				final int compressedLen = data.length + (dataPrefix == null ? 0 : 1);
				header.putInt(compressedLen);
				header.putInt(content.length);
				header.putInt(base);
				header.putInt(link);
				header.putInt(p1Rev);
				header.putInt(p2Rev);
				header.put(node.toByteArray());
				// assume 12 bytes left are zeros
				indexFile.write(header.array());
				if (dataPrefix != null) {
					indexFile.write(dataPrefix.byteValue());
				}
				indexFile.write(data);
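				// because the inline-data flag is set, revision data is interleaved with the index entries
				// in the same .i file (no separate .d data file is created)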
				//
				offset += compressedLen;
				revisionSequence.add(node);
				prevRevContent.done();
				prevRevContent = new ByteArrayDataAccess(content);
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
			return true;
		}
	}
}