comparison hg4j/src/main/java/org/tmatesoft/hg/core/HgCloneCommand.java @ 213:6ec4af642ba8 gradle

Project uses Gradle for build - actual changes
author Alexander Kitaev <kitaev@gmail.com>
date Tue, 10 May 2011 10:52:53 +0200
parents
children
/*
 * Copyright (c) 2011 TMate Software Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * For information on how to redistribute this software under
 * the terms of a license other than GNU General Public License
 * contact TMate Software at support@hg4j.com
 */
package org.tmatesoft.hg.core;

import static org.tmatesoft.hg.core.Nodeid.NULL;
import static org.tmatesoft.hg.internal.RequiresFile.*;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.TreeMap;
import java.util.zip.DeflaterOutputStream;

import org.tmatesoft.hg.internal.ByteArrayDataAccess;
import org.tmatesoft.hg.internal.DataAccess;
import org.tmatesoft.hg.internal.DigestHelper;
import org.tmatesoft.hg.internal.Internals;
import org.tmatesoft.hg.repo.HgBundle;
import org.tmatesoft.hg.repo.HgBundle.GroupElement;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRemoteRepository;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.util.CancelledException;
import org.tmatesoft.hg.util.PathRewrite;

/**
 * WORK IN PROGRESS, DO NOT USE
 *
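 * <p>
 * A minimal usage sketch, for illustration only: {@code hgRemote} stands for an
 * {@link org.tmatesoft.hg.repo.HgRemoteRepository} obtained elsewhere, and the target
 * directory name is hypothetical.
 * <pre>
 *   HgRepository cloned = new HgCloneCommand()
 *           .source(hgRemote)
 *           .destination(new File("/tmp/hg4j-clone"))
 *           .execute();
 * </pre>
 *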
 * @author Artem Tikhomirov
 * @author TMate Software Ltd.
 */
public class HgCloneCommand {

    private File destination;
    private HgRemoteRepository srcRepo;

    public HgCloneCommand() {
    }

    /**
     * @param folder location to become root of the repository (i.e. where <em>.hg</em> folder would reside).
     *               The folder shall either not exist or be empty.
     * @return <code>this</code> for convenience
     */
    public HgCloneCommand destination(File folder) {
        destination = folder;
        return this;
    }

    public HgCloneCommand source(HgRemoteRepository hgRemote) {
        srcRepo = hgRemote;
        return this;
    }

    public HgRepository execute() throws HgException, CancelledException {
        if (destination == null) {
            throw new HgBadArgumentException("Destination not set", null);
        }
        if (srcRepo == null || srcRepo.isInvalid()) {
            throw new HgBadArgumentException("Bad source repository", null);
        }
        if (destination.exists()) {
            if (!destination.isDirectory()) {
                throw new HgBadArgumentException(String.format("%s is not a directory", destination), null);
            } else if (destination.list().length > 0) {
                throw new HgBadArgumentException(String.format("%s shall be empty", destination), null);
            }
        } else {
            destination.mkdirs();
        }
        // if cloning a remote repo which can stream, and no revision is specified,
        // the 'stream_out' wireproto could be used
        //
        // pull all changes from the very beginning
        // XXX consult getContext() if, by any chance, it has a bundle ready; if not, read and register
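        // NOTE: requesting a changegroup rooted at the NULL revision is assumed to yield the
        // complete history of the remote repository, which is what a fresh clone needs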
        HgBundle completeChanges = srcRepo.getChanges(Collections.singletonList(NULL));
        WriteDownMate mate = new WriteDownMate(destination);
        try {
            // instantiate new repo in the destdir
            mate.initEmptyRepository();
            // pull changes
            completeChanges.inspectAll(mate);
            mate.complete();
        } catch (IOException ex) {
            throw new HgException(ex);
        } finally {
            completeChanges.unlink();
        }
        return new HgLookup().detect(destination);
    }


    // 1. process changelog, memorize nodeids to index
    // 2. process manifest, using map from step 1, collect manifest nodeids
    // 3. process every file, using map from step 1, and consult set from step 2 to ensure repo is correct
    private static class WriteDownMate implements HgBundle.Inspector {
        private final File hgDir;
        private final PathRewrite storagePathHelper;
        private FileOutputStream indexFile;
        private String filename; // human-readable name of the file being written, for log/exception purposes

        private final TreeMap<Nodeid, Integer> changelogIndexes = new TreeMap<Nodeid, Integer>();
        private boolean collectChangelogIndexes = false;

        private int base = -1;
        private long offset = 0;
        private DataAccess prevRevContent;
        private final DigestHelper dh = new DigestHelper();
        private final ArrayList<Nodeid> revisionSequence = new ArrayList<Nodeid>(); // last visited nodes first

        private final LinkedList<String> fncacheFiles = new LinkedList<String>();
        private Internals implHelper;

        public WriteDownMate(File destDir) {
            hgDir = new File(destDir, ".hg");
            implHelper = new Internals();
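            // presumably records the 'store', 'fncache' and 'dotencode' requirements for the new
            // repository (cf. org.tmatesoft.hg.internal.RequiresFile) and selects the matching store path encoding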
            implHelper.setStorageConfig(1, STORE | FNCACHE | DOTENCODE);
            storagePathHelper = implHelper.buildDataFilesHelper();
        }

        public void initEmptyRepository() throws IOException {
            implHelper.initEmptyRepository(hgDir);
        }

        public void complete() throws IOException {
            FileOutputStream fncacheFile = new FileOutputStream(new File(hgDir, "store/fncache"));
            for (String s : fncacheFiles) {
                fncacheFile.write(s.getBytes());
                fncacheFile.write(0x0A); // http://mercurial.selenic.com/wiki/fncacheRepoFormat
            }
            fncacheFile.close();
        }

        public void changelogStart() {
            try {
                base = -1;
                offset = 0;
                revisionSequence.clear();
                indexFile = new FileOutputStream(new File(hgDir, filename = "store/00changelog.i"));
                collectChangelogIndexes = true;
            } catch (IOException ex) {
                throw new HgBadStateException(ex);
            }
        }

        public void changelogEnd() {
            try {
                if (prevRevContent != null) {
                    prevRevContent.done();
                    prevRevContent = null;
                }
                collectChangelogIndexes = false;
                indexFile.close();
                indexFile = null;
                filename = null;
            } catch (IOException ex) {
                throw new HgBadStateException(ex);
            }
        }

        public void manifestStart() {
            try {
                base = -1;
                offset = 0;
                revisionSequence.clear();
                indexFile = new FileOutputStream(new File(hgDir, filename = "store/00manifest.i"));
            } catch (IOException ex) {
                throw new HgBadStateException(ex);
            }
        }

        public void manifestEnd() {
            try {
                if (prevRevContent != null) {
                    prevRevContent.done();
                    prevRevContent = null;
                }
                indexFile.close();
                indexFile = null;
                filename = null;
            } catch (IOException ex) {
                throw new HgBadStateException(ex);
            }
        }

        public void fileStart(String name) {
            try {
                base = -1;
                offset = 0;
                revisionSequence.clear();
                fncacheFiles.add("data/" + name + ".i"); // FIXME this is pure guess,
                // need to investigate more how filenames are kept in fncache
                File file = new File(hgDir, filename = storagePathHelper.rewrite(name));
                file.getParentFile().mkdirs();
                indexFile = new FileOutputStream(file);
            } catch (IOException ex) {
                throw new HgBadStateException(ex);
            }
        }

        public void fileEnd(String name) {
            try {
                if (prevRevContent != null) {
                    prevRevContent.done();
                    prevRevContent = null;
                }
                indexFile.close();
                indexFile = null;
                filename = null;
            } catch (IOException ex) {
                throw new HgBadStateException(ex);
            }
        }

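        // maps a parent nodeid to its local revision index within the revlog currently being written;
        // the NULL nodeid maps to -1, the revlog notation for a missing parent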
        private int knownRevision(Nodeid p) {
            if (NULL.equals(p)) {
                return -1;
            } else {
                for (int i = revisionSequence.size() - 1; i >= 0; i--) {
                    if (revisionSequence.get(i).equals(p)) {
                        return i;
                    }
                }
            }
            throw new HgBadStateException(String.format("Can't find index of %s for file %s", p.shortNotation(), filename));
        }

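        // Called for each revision in the changegroup: reconstructs the full revision content by applying
        // the group's delta to the previous revision, verifies the SHA-1 based nodeid, then appends an
        // inline revlog entry (index record immediately followed by data) to the file opened in *Start()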
        public boolean element(GroupElement ge) {
            try {
                assert indexFile != null;
                boolean writeComplete = false;
                Nodeid p1 = ge.firstParent();
                Nodeid p2 = ge.secondParent();
                if (NULL.equals(p1) && NULL.equals(p2) /* or forced flag, does REVIDX_PUNCHED_FLAG indicate that? */) {
                    prevRevContent = new ByteArrayDataAccess(new byte[0]);
                    writeComplete = true;
                }
                byte[] content = ge.apply(prevRevContent);
                byte[] calculated = dh.sha1(p1, p2, content).asBinary();
                final Nodeid node = ge.node();
                if (!node.equalsTo(calculated)) {
                    throw new HgBadStateException(String.format("Checksum failed: expected %s, calculated %s. File %s", node, calculated, filename));
                }
                final int link;
                if (collectChangelogIndexes) {
                    changelogIndexes.put(node, revisionSequence.size());
                    link = revisionSequence.size();
                } else {
                    Integer csRev = changelogIndexes.get(ge.cset());
                    if (csRev == null) {
                        throw new HgBadStateException(String.format("Changelog doesn't contain revision %s of %s", ge.cset().shortNotation(), filename));
                    }
                    link = csRev.intValue();
                }
                final int p1Rev = knownRevision(p1), p2Rev = knownRevision(p2);
                DataAccess patchContent = ge.rawData();
                writeComplete = writeComplete || patchContent.length() >= (/* 3/4 of actual */content.length - (content.length >>> 2));
                if (writeComplete) {
                    base = revisionSequence.size();
                }
                final byte[] sourceData = writeComplete ? content : patchContent.byteArray();
                final byte[] data;
                ByteArrayOutputStream bos = new ByteArrayOutputStream(content.length);
                DeflaterOutputStream dos = new DeflaterOutputStream(bos);
                dos.write(sourceData);
                dos.close();
                final byte[] compressedData = bos.toByteArray();
                dos = null;
                bos = null;
                final Byte dataPrefix;
                if (compressedData.length >= (sourceData.length - (sourceData.length >>> 2))) {
                    // compression wasn't effective enough (saved less than 25%); store plain, marked with 'u' prefix
                    data = sourceData;
                    dataPrefix = 'u';
                } else {
                    data = compressedData;
                    dataPrefix = null;
                }

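                // RevlogNG index entry, 64 bytes: 6-byte offset + 2-byte flags (packed as offset << 16,
                // with version and inline-data flag taking that slot in the very first entry), 4-byte
                // compressed length, 4-byte uncompressed length, 4-byte base revision, 4-byte link revision,
                // 4-byte first and second parent revisions, and a 32-byte field holding the 20-byte nodeid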
                ByteBuffer header = ByteBuffer.allocate(64 /* REVLOGV1_RECORD_SIZE */);
                if (offset == 0) {
                    final int INLINEDATA = 1 << 16;
                    header.putInt(1 /* RevlogNG */ | INLINEDATA);
                    header.putInt(0);
                } else {
                    header.putLong(offset << 16);
                }
                final int compressedLen = data.length + (dataPrefix == null ? 0 : 1);
                header.putInt(compressedLen);
                header.putInt(content.length);
                header.putInt(base);
                header.putInt(link);
                header.putInt(p1Rev);
                header.putInt(p2Rev);
                header.put(node.toByteArray());
                // assume 12 bytes left are zeros
                indexFile.write(header.array());
                if (dataPrefix != null) {
                    indexFile.write(dataPrefix.byteValue());
                }
                indexFile.write(data);
                //
                offset += compressedLen;
                revisionSequence.add(node);
                prevRevContent.done();
                prevRevContent = new ByteArrayDataAccess(content);
            } catch (IOException ex) {
                throw new HgBadStateException(ex);
            }
            return true;
        }
    }

}