comparison src/org/tmatesoft/hg/repo/HgDataFile.java @ 317:09628675bcee

Rework file history build approach to match rest of the API
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Thu, 29 Sep 2011 03:20:28 +0200
parents 971baa95fb07
children d68dcb3b5f49
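
This changeset replaces the pull-style HistoryWalker (deleted in the diff below) with a push-style HgChangelog.TreeInspector callback, in line with the inspector-based API used elsewhere in the library. The following is a minimal sketch of how a caller might consume the reworked method; the TreeInspector.next(...) parameter types are inferred from the inspector.next(...) call site in this changeset, and the repository-access calls (HgLookup, getFileNode, Path.create) are illustrative assumptions rather than part of this diff:

import java.util.Collection;

import org.tmatesoft.hg.core.Nodeid;
import org.tmatesoft.hg.repo.HgChangelog;
import org.tmatesoft.hg.repo.HgDataFile;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.util.Pair;
import org.tmatesoft.hg.util.Path;

public class FileHistoryTreeSketch {
    public static void main(String[] args) throws Exception {
        // assumption: repository detection and file lookup as in other hg4j samples
        HgRepository hgRepo = new HgLookup().detectFromWorkingDir();
        HgDataFile df = hgRepo.getFileNode(Path.create("src/org/tmatesoft/hg/repo/HgDataFile.java"));
        // parameter types below are inferred from inspector.next(...) in this changeset
        df.history(new HgChangelog.TreeInspector() {
            public void next(Nodeid changesetRevision, Pair<Nodeid, Nodeid> parentChangesets, Collection<Nodeid> childChangesets) {
                // what HistoryWalker.isJoin()/isFork() used to report can be derived from the callback arguments
                boolean isJoin = !Nodeid.NULL.equals(parentChangesets.first()) && !Nodeid.NULL.equals(parentChangesets.second());
                boolean isFork = childChangesets.size() > 1;
                System.out.printf("%s join:%s fork:%s children:%d%n", changesetRevision, isJoin, isFork, childChangesets.size());
            }
        });
    }
}

Pushing revisions to the inspector lets the implementation keep iteration, cancellation (CancelSupport.Factory.get(inspector)) and bulk nodeid resolution internal, instead of exposing walker state to the caller.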
comparison
--- a/src/org/tmatesoft/hg/repo/HgDataFile.java  316:ee6b467c1a5f
+++ b/src/org/tmatesoft/hg/repo/HgDataFile.java  317:09628675bcee
@@ -28,22 +28,19 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
-import java.util.NoSuchElementException;

 import org.tmatesoft.hg.core.HgDataStreamException;
 import org.tmatesoft.hg.core.HgException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.internal.DataAccess;
-import org.tmatesoft.hg.internal.Experimental;
 import org.tmatesoft.hg.internal.FilterByteChannel;
 import org.tmatesoft.hg.internal.FilterDataAccess;
 import org.tmatesoft.hg.internal.IntMap;
 import org.tmatesoft.hg.internal.RevlogStream;
-import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
 import org.tmatesoft.hg.util.ByteChannel;
 import org.tmatesoft.hg.util.CancelSupport;
 import org.tmatesoft.hg.util.CancelledException;
 import org.tmatesoft.hg.util.Pair;
 import org.tmatesoft.hg.util.Path;
@@ -229,139 +226,121 @@
             // shall not happen, unless we changed ContentPipe or its subclass
             throw new HgDataStreamException(getPath(), ex.getClass().getName(), ex);
         }
     }

-    @Experimental(reason="Investigate approaches to build complete file history log")
-    public interface HistoryWalker {
-        void next();
-        boolean hasNext();
-        Nodeid changesetRevision();
-        RawChangeset changeset();
-        boolean isFork();
-        boolean isJoin();
-        Pair<Nodeid, Nodeid> parentChangesets();
-        Collection<Nodeid> childChangesets();
-    }
-
-    public HistoryWalker history() {
-        final boolean[] needsSorting = { false };
-        class HistoryNode {
-            int changeset;
-            Nodeid cset;
-            HistoryNode parent1, parent2;
-            List<HistoryNode> children;
-
-            public HistoryNode(int cs, HistoryNode p1, HistoryNode p2) {
-                changeset = cs;
-                parent1 = p1;
-                parent2 = p2;
-                if (p1 != null) {
-                    p1.addChild(this);
-                }
-                if (p2 != null) {
-                    p2.addChild(this);
-                }
-            }
-
-            public Nodeid changesetRevision() {
-                if (cset == null) {
-                    cset = getRepo().getChangelog().getRevision(changeset);
-                }
-                return cset;
-            }
-
-            public void addChild(HistoryNode child) {
-                if (children == null) {
-                    children = new ArrayList<HistoryNode>(2);
-                }
-                children.add(child);
-            }
-        }
-        final HistoryNode[] completeHistory = new HistoryNode[getRevisionCount()];
-        final int[] commitRevisions = new int[completeHistory.length];
-        RevlogStream.Inspector insp = new RevlogStream.Inspector() {
-            public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) {
-                if (revisionNumber > 0) {
-                    if (commitRevisions[revisionNumber-1] > linkRevision) {
-                        needsSorting[0] = true;
-                    }
-                }
-                HistoryNode p1 = null, p2 = null;
-                if (parent1Revision != -1) {
-                    p1 = completeHistory[parent1Revision];
-                }
-                if (parent2Revision != -1) {
-                    p2 = completeHistory[parent2Revision];
-                }
-                completeHistory[revisionNumber] = new HistoryNode(linkRevision, p1, p2);
-            }
-        };
-        content.iterate(0, getLastRevision(), false, insp);
-        // XXX may read changeset revisions at once (to avoid numerous changelog.getRevision reads)
-        // but perhaps just nodeids, not RawChangeset (changelog.iterate(data=false)
-        // XXX sort completeHistory according to changeset numbers?
-        return new HistoryWalker() {
-            private int index = -1;
-
-            public void next() {
-                if (!hasNext()) {
-                    throw new NoSuchElementException();
-                }
-                index++;
-            }
-
-            public boolean hasNext() {
-                return index+1 < completeHistory.length;
-            }
-
-            public Pair<Nodeid, Nodeid> parentChangesets() {
+    private static class HistoryNode {
+        int changeset;
+        Nodeid cset;
+        HistoryNode parent1, parent2;
+        List<HistoryNode> children;
+
+        HistoryNode(int cs, HistoryNode p1, HistoryNode p2) {
+            changeset = cs;
+            parent1 = p1;
+            parent2 = p2;
+            if (p1 != null) {
+                p1.addChild(this);
+            }
+            if (p2 != null) {
+                p2.addChild(this);
+            }
+        }
+
+        Nodeid changesetRevision() {
+            assert cset != null : "we initialize all csets prior to use";
+            return cset;
+        }
+
+        void addChild(HistoryNode child) {
+            if (children == null) {
+                children = new ArrayList<HistoryNode>(2);
+            }
+            children.add(child);
+        }
+    }
+
+    public void history(HgChangelog.TreeInspector inspector) {
+        final CancelSupport cancelSupport = CancelSupport.Factory.get(inspector);
+        try {
+            final boolean[] needsSorting = { false };
+            final HistoryNode[] completeHistory = new HistoryNode[getRevisionCount()];
+            final int[] commitRevisions = new int[completeHistory.length];
+            RevlogStream.Inspector insp = new RevlogStream.Inspector() {
+                public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) {
+                    if (revisionNumber > 0) {
+                        if (commitRevisions[revisionNumber-1] > linkRevision) {
+                            needsSorting[0] = true;
+                        }
+                    }
+                    commitRevisions[revisionNumber] = linkRevision;
+                    HistoryNode p1 = null, p2 = null;
+                    if (parent1Revision != -1) {
+                        p1 = completeHistory[parent1Revision];
+                    }
+                    if (parent2Revision != -1) {
+                        p2 = completeHistory[parent2Revision];
+                    }
+                    completeHistory[revisionNumber] = new HistoryNode(linkRevision, p1, p2);
+                }
+            };
+            content.iterate(0, getLastRevision(), false, insp);
+            cancelSupport.checkCancelled();
+            if (needsSorting[0]) {
+                Arrays.sort(commitRevisions);
+            }
+            // read changeset revisions at once (to avoid numerous changelog.getRevision reads)
+            // but just nodeids, not RawChangeset (changelog.iterate(data=false)
+            ArrayList<Nodeid> changesetRevisions = new ArrayList<Nodeid>(commitRevisions.length);
+            getRepo().getChangelog().getRevisionsInternal(changesetRevisions, commitRevisions);
+            cancelSupport.checkCancelled();
+            // assign them to corresponding HistoryNodes
+            for (int i = 0; i < completeHistory.length; i++ ) {
+                final HistoryNode n = completeHistory[i];
+                if (needsSorting[0]) {
+                    int x = Arrays.binarySearch(commitRevisions, n.changeset);
+                    assert x >= 0;
+                    n.cset = changesetRevisions.get(x);
+                } else {
+                    // commit revisions were not sorted, may use original index directly
+                    n.cset = changesetRevisions.get(i);
+                }
+            }
+            cancelSupport.checkCancelled();
+            // XXX shall sort completeHistory according to changeset numbers?
+            for (int i = 0; i < completeHistory.length; i++ ) {
+                final HistoryNode n = completeHistory[i];
                 HistoryNode p;
                 Nodeid p1, p2;
-                if ((p = completeHistory[index].parent1) != null) {
+                if ((p = n.parent1) != null) {
                     p1 = p.changesetRevision();
                 } else {
                     p1 = Nodeid.NULL;
                 }
-                if ((p = completeHistory[index].parent2) != null) {
+                if ((p= n.parent2) != null) {
                     p2 = p.changesetRevision();
                 } else {
                     p2 = Nodeid.NULL;
                 }
-                return new Pair<Nodeid, Nodeid>(p1, p2);
-            }
-
-            public boolean isJoin() {
-                return completeHistory[index].parent1 != null && completeHistory[index].parent2 != null;
-            }
-
-            public boolean isFork() {
-                HistoryNode n = completeHistory[index];
-                return n.children != null && n.children.size() > 1;
-            }
-
-            public Collection<Nodeid> childChangesets() {
-                HistoryNode n = completeHistory[index];
+                final Pair<Nodeid, Nodeid> parentChangesets = new Pair<Nodeid, Nodeid>(p1, p2);
+                final List<Nodeid> childChangesets;
                 if (n.children == null) {
-                    return Collections.emptyList();
-                }
-                ArrayList<Nodeid> rv = new ArrayList<Nodeid>(n.children.size());
-                for (HistoryNode hn : n.children) {
-                    rv.add(hn.changesetRevision());
-                }
-                return rv;
-            }
-
-            public Nodeid changesetRevision() {
-                return completeHistory[index].changesetRevision();
+                    childChangesets = Collections.emptyList();
+                } else {
+                    Nodeid[] revisions = new Nodeid[n.children.size()];
+                    int j = 0;
+                    for (HistoryNode hn : n.children) {
+                        revisions[j++] = hn.changesetRevision();
+                    }
+                    childChangesets = Arrays.asList(revisions);
+                }
+                inspector.next(n.changesetRevision(), parentChangesets, childChangesets);
+                cancelSupport.checkCancelled();
             }
-
-            public RawChangeset changeset() {
-                final int cs = completeHistory[index].changeset;
-                return getRepo().getChangelog().range(cs, cs).get(0);
-            }
-        };
+        } catch (CancelledException ex) {
+            return;
+        }
     }

     public void history(HgChangelog.Inspector inspector) {
         history(0, getLastRevision(), inspector);
     }
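
A note on the needsSorting branch in the new history(TreeInspector) body above: link revisions recorded in the file revlog may come out of changelog order, so the code sorts commitRevisions, fetches the changeset nodeids for the sorted values in a single getRevisionsInternal(...) call, and then maps each HistoryNode back to its nodeid with a binary search (relying on the fetched nodeids being parallel to the sorted link revisions). A standalone illustration of that mapping, using hypothetical values and plain JDK calls only:

import java.util.Arrays;

public class LinkRevisionMappingSketch {
    public static void main(String[] args) {
        int[] commitRevisions = { 4, 2, 7 };   // linkRevision per file revision, out of ascending order
        // stand-in for the nodeids returned by getRevisionsInternal(), assumed parallel to the sorted revisions
        String[] changesetRevisions = { "cset-2", "cset-4", "cset-7" };
        int[] sorted = commitRevisions.clone(); // the real code sorts in place; each HistoryNode keeps its own link revision
        Arrays.sort(sorted);                    // { 2, 4, 7 }
        for (int fileRev = 0; fileRev < commitRevisions.length; fileRev++) {
            int x = Arrays.binarySearch(sorted, commitRevisions[fileRev]);
            System.out.printf("file revision %d -> changeset %d -> %s%n", fileRev, commitRevisions[fileRev], changesetRevisions[x]);
        }
    }
}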