hg4j: comparison of src/org/tmatesoft/hg/internal/BundleGenerator.java @ 645:14dac192aa26

Push: phase2 - upload bundle with changes to remote server

author:   Artem Tikhomirov <tikhomirov.artem@gmail.com>
date:     Thu, 20 Jun 2013 19:15:09 +0200
parents:  1deea2f33218
children: 6e98d34eaca8
```diff
--- src/org/tmatesoft/hg/internal/BundleGenerator.java (644:1deea2f33218)
+++ src/org/tmatesoft/hg/internal/BundleGenerator.java (645:14dac192aa26)
@@ -20,21 +20,22 @@
 import static org.tmatesoft.hg.repo.HgRepository.TIP;
 
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
-import org.tmatesoft.hg.console.Bundle;
 import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.DataSerializer.OutputStreamSerializer;
 import org.tmatesoft.hg.internal.Patch.PatchDataSource;
 import org.tmatesoft.hg.repo.HgBundle;
 import org.tmatesoft.hg.repo.HgChangelog;
 import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
 import org.tmatesoft.hg.repo.HgDataFile;
```
```diff
@@ -70,18 +71,21 @@
         System.out.printf("Changelog: %s\n", Arrays.toString(clogRevs));
         final IntMap<Nodeid> clogMap = new IntMap<Nodeid>(changesets.size());
         final IntVector manifestRevs = new IntVector(changesets.size(), 0);
         final List<HgDataFile> files = new ArrayList<HgDataFile>();
         clog.range(new HgChangelog.Inspector() {
+            private Set<String> seenFiles = new HashSet<String>();
             public void next(int revisionIndex, Nodeid nodeid, RawChangeset cset) throws HgRuntimeException {
                 clogMap.put(revisionIndex, nodeid);
                 manifestRevs.add(manifest.getRevisionIndex(cset.manifest()));
                 for (String f : cset.files()) {
+                    if (seenFiles.contains(f)) {
+                        continue;
+                    }
+                    seenFiles.add(f);
                     HgDataFile df = repo.getRepo().getFileNode(f);
-                    if (!files.contains(df)) {
-                        files.add(df);
-                    }
+                    files.add(df);
                 }
             }
         }, clogRevs);
         manifestRevs.sort(true);
         System.out.printf("Manifest: %s\n", Arrays.toString(manifestRevs.toArray(true)));
```
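The hunk above swaps the `files.contains(df)` list scan for a `HashSet` of file paths that have already been handled, so a path touched by several changesets is resolved to its `HgDataFile` only once. A minimal sketch of that pattern in isolation (the class and method names here are illustrative, not from the changeset):

```java
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Illustrative stand-in for the loop above: "changesetFiles" plays the role of the
// concatenated cset.files() lists, and the returned list corresponds to "files"
// (which in the real code holds HgDataFile nodes, not plain strings).
class SeenFilesSketch {
    static List<String> collectDistinct(Iterable<String> changesetFiles) {
        Set<String> seen = new HashSet<String>();
        List<String> distinct = new ArrayList<String>();
        for (String f : changesetFiles) {
            if (seen.contains(f)) {
                continue; // path already collected for an earlier changeset
            }
            seen.add(f);
            distinct.add(f); // the real code adds repo.getRepo().getFileNode(f) here
        }
        return distinct;
    }
}
```

Besides turning the membership test into a constant-time set lookup, the early `continue` also skips the `getFileNode()` call for paths that were already collected.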
```diff
@@ -104,11 +108,12 @@
             return null;
         }
         ///////////////
         //
         final File bundleFile = File.createTempFile("hg4j-", "bundle");
-        final OutputStreamSerializer outRaw = new OutputStreamSerializer(new FileOutputStream(bundleFile));
+        final FileOutputStream osBundle = new FileOutputStream(bundleFile);
+        final OutputStreamSerializer outRaw = new OutputStreamSerializer(osBundle);
         outRaw.write("HG10UN".getBytes(), 0, 6);
         //
         RevlogStream clogStream = repo.getImplAccess().getChangelogStream();
         new ChunkGenerator(outRaw, clogMap).iterate(clogStream, clogRevs);
         outRaw.writeInt(0); // null chunk for changelog group
@@ -138,11 +143,14 @@
                 outRaw.writeByte(fnameBytes);
                 new ChunkGenerator(outRaw, clogMap).iterate(s, fileRevs.toArray(true));
                 outRaw.writeInt(0); // null chunk for file group
             }
         }
+        outRaw.writeInt(0); // null chunk to indicate no more files (although BundleFormat page doesn't mention this)
         outRaw.done();
+        osBundle.flush();
+        osBundle.close();
         //return new HgBundle(repo.getSessionContext(), repo.getDataAccess(), bundleFile);
         return bundleFile;
     }
 
     private static Collection<HgDataFile> sortedByName(List<HgDataFile> files) {
```
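Together, the last two hunks spell out the stream layout being written: the six-byte `HG10UN` header (`HG10` bundle format, `UN` for an uncompressed payload), a changelog chunk group, a manifest group (written in the part of the method not shown in this comparison), then one group per file, each group closed by a zero-length ("null") chunk; the newly added trailing null chunk marks the end of the file list, and the explicit `flush()`/`close()` on `osBundle` make sure the temporary file is complete before it is returned. A self-contained sketch of that skeleton using plain `java.io` (payload chunks omitted; a reading aid, not the hg4j implementation):

```java
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Skeleton of the HG10UN stream produced above: header, then one chunk group per
// section, each closed by a zero-length ("null") chunk. Payload chunks are left out,
// so only the terminators mentioned in the diff comments remain.
class BundleLayoutSketch {
    static byte[] emptyBundleSkeleton() throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buf);
        out.writeBytes("HG10UN"); // "HG10" bundle format, "UN" = uncompressed payload
        out.writeInt(0);          // null chunk: end of the changelog group
        out.writeInt(0);          // null chunk: end of the manifest group
        // per-file groups would go here: <filename chunk> <revision chunks> <null chunk>
        out.writeInt(0);          // trailing null chunk: no more file groups
        out.flush();
        return buf.toByteArray();
    }
}
```

With all payloads left out this is essentially what an empty changegroup bundle reduces to; the real method interleaves the ChunkGenerator output ahead of each terminator.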
```diff
@@ -233,32 +241,6 @@
             } catch (HgIOException ex) {
                 throw new HgInvalidControlFileException(ex, true); // XXX any way to refactor ChunkGenerator not to get checked exception here?
             }
         }
     }
-
-    private static class OutputStreamSerializer extends DataSerializer {
-        private final OutputStream out;
-        public OutputStreamSerializer(OutputStream outputStream) {
-            out = outputStream;
-        }
-
-        @Override
-        public void write(byte[] data, int offset, int length) throws HgIOException {
-            try {
-                out.write(data, offset, length);
-            } catch (IOException ex) {
-                throw new HgIOException(ex.getMessage(), ex, null);
-            }
-        }
-
-        @Override
-        public void done() throws HgIOException {
-            try {
-                out.close();
-                super.done();
-            } catch (IOException ex) {
-                throw new HgIOException(ex.getMessage(), ex, null);
-            }
-        }
-    }
 }
```
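The private `OutputStreamSerializer` deleted here is superseded by the shared `org.tmatesoft.hg.internal.DataSerializer.OutputStreamSerializer` imported at the top of the file; the caller now keeps its own reference to the `FileOutputStream` and flushes/closes it explicitly after `done()`, instead of relying on the serializer to close the stream. A hypothetical wiring sketch under that assumption, assuming the shared class keeps the write/done contract of the deleted one (placed in the `org.tmatesoft.hg.internal` package in case it is not visible outside it):

```java
package org.tmatesoft.hg.internal;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

import org.tmatesoft.hg.core.HgIOException;
import org.tmatesoft.hg.internal.DataSerializer.OutputStreamSerializer;

// Hypothetical wiring sketch, not part of the changeset: write just the bundle
// header through the shared serializer and close the underlying stream ourselves.
class SerializerWiringSketch {
    static File writeHeaderOnly(File bundleFile) throws IOException, HgIOException {
        FileOutputStream osBundle = new FileOutputStream(bundleFile);
        OutputStreamSerializer outRaw = new OutputStreamSerializer(osBundle);
        outRaw.write("HG10UN".getBytes(), 0, 6);
        outRaw.done();    // assumption: done() no longer closes the stream for us
        osBundle.flush();
        osBundle.close();
        return bundleFile;
    }
}
```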