hg4j: comparison src/org/tmatesoft/hg/repo/HgBranches.java @ 308:3f40262153a4
Recognize closed branches
author | Artem Tikhomirov <tikhomirov.artem@gmail.com> |
---|---|
date | Sat, 24 Sep 2011 07:29:05 +0200 |
parents | 981f9f50bb6c |
children | 962f78aac342 |
comparison
307:2f2ab5c27f41 | 308:3f40262153a4 |
---|---|
20 import java.io.BufferedWriter; | 20 import java.io.BufferedWriter; |
21 import java.io.File; | 21 import java.io.File; |
22 import java.io.FileReader; | 22 import java.io.FileReader; |
23 import java.io.FileWriter; | 23 import java.io.FileWriter; |
24 import java.io.IOException; | 24 import java.io.IOException; |
25 import java.util.ArrayList; | |
26 import java.util.Arrays; | 25 import java.util.Arrays; |
27 import java.util.Collections; | 26 import java.util.Collections; |
28 import java.util.HashMap; | 27 import java.util.HashMap; |
29 import java.util.HashSet; | 28 import java.util.LinkedHashMap; |
30 import java.util.LinkedHashSet; | 29 import java.util.LinkedHashSet; |
31 import java.util.LinkedList; | 30 import java.util.LinkedList; |
32 import java.util.List; | 31 import java.util.List; |
33 import java.util.Map; | 32 import java.util.Map; |
34 import java.util.TreeMap; | 33 import java.util.TreeMap; |
61 return lastInCache; | 60 return lastInCache; |
62 } | 61 } |
63 BufferedReader br = null; | 62 BufferedReader br = null; |
64 final Pattern spacePattern = Pattern.compile(" "); | 63 final Pattern spacePattern = Pattern.compile(" "); |
65 try { | 64 try { |
| 65 final LinkedHashMap<String, List<Nodeid>> branchHeads = new LinkedHashMap<String, List<Nodeid>>(); |
66 br = new BufferedReader(new FileReader(branchheadsCache)); | 66 br = new BufferedReader(new FileReader(branchheadsCache)); |
67 String line = br.readLine(); | 67 String line = br.readLine(); |
68 if (line == null || line.trim().length() == 0) { | 68 if (line == null || line.trim().length() == 0) { |
69 return lastInCache; | 69 return lastInCache; |
70 } | 70 } |
72 lastInCache = Integer.parseInt(cacheIdentity[1]); | 72 lastInCache = Integer.parseInt(cacheIdentity[1]); |
73 // XXX may want to check if nodeid of cset from repo.getChangelog() of lastInCache index match cacheIdentity[0] | 73 // XXX may want to check if nodeid of cset from repo.getChangelog() of lastInCache index match cacheIdentity[0] |
74 // | 74 // |
75 while ((line = br.readLine()) != null) { | 75 while ((line = br.readLine()) != null) { |
76 String[] elements = spacePattern.split(line.trim()); | 76 String[] elements = spacePattern.split(line.trim()); |
77 if (elements.length < 2) { | 77 if (elements.length != 2) { |
78 // bad entry | 78 // bad entry |
79 continue; | 79 continue; |
80 } | 80 } |
81 Nodeid[] branchHeads = new Nodeid[elements.length - 1]; | |
82 for (int i = 0; i < elements.length - 1; i++) { | |
83 branchHeads[i] = Nodeid.fromAscii(elements[i]); | |
84 } | |
85 // I assume split returns substrings of the original string, hence copy of a branch name | 81 // I assume split returns substrings of the original string, hence copy of a branch name |
86 String branchName = new String(elements[elements.length-1]); | 82 String branchName = new String(elements[elements.length-1]); |
87 BranchInfo bi = new BranchInfo(branchName, branchHeads); | 83 List<Nodeid> heads = branchHeads.get(elements[1]); |
88 branches.put(branchName, bi); | 84 if (heads == null) { |
| 85 branchHeads.put(branchName, heads = new LinkedList<Nodeid>()); |
| 86 } |
| 87 heads.add(Nodeid.fromAscii(elements[0])); |
| 88 } |
| 89 for (Map.Entry<String, List<Nodeid>> e : branchHeads.entrySet()) { |
| 90 Nodeid[] heads = e.getValue().toArray(new Nodeid[e.getValue().size()]); |
| 91 BranchInfo bi = new BranchInfo(e.getKey(), heads); |
| 92 branches.put(e.getKey(), bi); |
89 } | 93 } |
90 return lastInCache; | 94 return lastInCache; |
91 } catch (IOException ex) { | 95 } catch (IOException ex) { |
92 repo.getContext().getLog().warn(getClass(), ex, null); // log error, but otherwise do nothing | 96 repo.getContext().getLog().warn(getClass(), ex, null); // log error, but otherwise do nothing |
93 } finally { | 97 } finally { |
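
For context on the parsing rework above: the loop now collects one head per cache line into a LinkedHashMap before building each BranchInfo. The sketch below illustrates the .hg/cache/branchheads layout this logic implies (a first line identifying the cache state, every further line carrying "&lt;head nodeid&gt; &lt;branch name&gt;"); the nodeids, revision index and branch names are hypothetical placeholders, not data from this repository.

```java
import java.util.regex.Pattern;

// Minimal sketch of the cache layout implied by the parsing code above;
// all values below are hypothetical placeholders, not real repository data.
public class BranchheadsCacheSample {
    public static void main(String[] args) {
        String[] cacheLines = {
            // "<tip nodeid> <tip revision index>" - identifies how far the cache is valid
            "0123456789abcdef0123456789abcdef01234567 42",
            // "<head nodeid> <branch name>" - one head per line, a branch name may repeat
            "0123456789abcdef0123456789abcdef01234567 default",
            "fedcba9876543210fedcba9876543210fedcba98 feature-x"
        };
        Pattern spacePattern = Pattern.compile(" ");
        String[] cacheIdentity = spacePattern.split(cacheLines[0].trim());
        System.out.println("cache valid up to revision " + Integer.parseInt(cacheIdentity[1]));
        for (int i = 1; i < cacheLines.length; i++) {
            String[] elements = spacePattern.split(cacheLines[i].trim());
            if (elements.length != 2) {
                continue; // bad entry, same policy as the parser above
            }
            System.out.println("branch '" + elements[1] + "' has head " + elements[0]);
        }
    }
}
```
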
169 isCacheActual = lastCached == repo.getChangelog().getLastRevision(); | 173 isCacheActual = lastCached == repo.getChangelog().getLastRevision(); |
170 if (!isCacheActual) { | 174 if (!isCacheActual) { |
171 final HgChangelog.ParentWalker pw = repo.getChangelog().new ParentWalker(); | 175 final HgChangelog.ParentWalker pw = repo.getChangelog().new ParentWalker(); |
172 pw.init(); | 176 pw.init(); |
173 ps.worked(repo.getChangelog().getRevisionCount()); | 177 ps.worked(repo.getChangelog().getRevisionCount()); |
| 178 // first revision branch found at |
174 final HashMap<String, Nodeid> branchStart = new HashMap<String, Nodeid>(); | 179 final HashMap<String, Nodeid> branchStart = new HashMap<String, Nodeid>(); |
| 180 // last revision seen for the branch |
175 final HashMap<String, Nodeid> branchLastSeen = new HashMap<String, Nodeid>(); | 181 final HashMap<String, Nodeid> branchLastSeen = new HashMap<String, Nodeid>(); |
| 182 // revisions from the branch that have no children at all |
176 final HashMap<String, List<Nodeid>> branchHeads = new HashMap<String, List<Nodeid>>(); | 183 final HashMap<String, List<Nodeid>> branchHeads = new HashMap<String, List<Nodeid>>(); |
177 final HashSet<String> closedBranches = new HashSet<String>(); | 184 // revisions that are immediate children of a node from a given branch |
| 185 final HashMap<String, List<Nodeid>> branchHeadCandidates = new HashMap<String, List<Nodeid>>(); |
178 HgChangelog.Inspector insp = new HgChangelog.Inspector() { | 186 HgChangelog.Inspector insp = new HgChangelog.Inspector() { |
179 | 187 |
180 public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) { | 188 public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) { |
181 String branchName = cset.branch(); | 189 String branchName = cset.branch(); |
182 if (!branchStart.containsKey(branchName)) { | 190 if (!branchStart.containsKey(branchName)) { |
183 branchStart.put(branchName, nodeid); | 191 branchStart.put(branchName, nodeid); |
184 branchHeads.put(branchName, new LinkedList<Nodeid>()); | 192 branchHeads.put(branchName, new LinkedList<Nodeid>()); |
185 } | 193 branchHeadCandidates.put(branchName, new LinkedList<Nodeid>()); |
186 branchLastSeen.remove(branchName); | |
187 if ("1".equals(cset.extras().get("close"))) { | |
188 branchHeads.get(branchName).add(nodeid); // XXX what if it still has children? | |
189 closedBranches.add(branchName); | |
190 } else { | 194 } else { |
191 if (pw.hasChildren(nodeid)) { | 195 final List<Nodeid> headCandidates = branchHeadCandidates.get(branchName); |
192 // children may be in another branch | 196 if (headCandidates.remove(nodeid)) { |
193 // and unless we later came across another element from this branch, | 197 // no need to keep parent, as we found at least 1 child thereof to be at the same branch |
194 // we need to record all these as valid heads | 198 branchLastSeen.remove(branchName); |
195 // XXX what about next case: head1 with children in different branch, and head2 without children | |
196 // head1 would get lost | |
197 branchLastSeen.put(branchName, nodeid); | |
198 } else { | |
199 // no more children known for this node, it's (one of the) head of the branch | |
200 branchHeads.get(branchName).add(nodeid); | |
201 } | 199 } |
| 200 } |
| 201 List<Nodeid> immediateChildren = pw.directChildren(nodeid); |
| 202 if (immediateChildren.size() > 0) { |
| 203 // 1) children may be in another branch |
| 204 // and unless we later came across another element from this branch, |
| 205 // we need to record all these as potential heads |
| 206 // |
| 207 // 2) head1 with children in different branch, and head2 in this branch without children |
| 208 branchLastSeen.put(branchName, nodeid); |
| 209 branchHeadCandidates.get(branchName).addAll(immediateChildren); |
| 210 } else { |
| 211 // no more children known for this node, it's (one of the) head of the branch |
| 212 branchHeads.get(branchName).add(nodeid); |
202 } | 213 } |
203 ps.worked(1); | 214 ps.worked(1); |
204 } | 215 } |
205 }; | 216 }; |
206 repo.getChangelog().range(lastCached == -1 ? 0 : lastCached+1, HgRepository.TIP, insp); | 217 repo.getChangelog().range(lastCached == -1 ? 0 : lastCached+1, HgRepository.TIP, insp); |
| 218 // System.out.println("HEAD CANDIDATES>>>"); |
| 219 // for (String bn : branchHeadCandidates.keySet()) { |
| 220 // System.out.println(bn + ":" + branchHeadCandidates.get(bn).toString()); |
| 221 // } |
| 222 // System.out.println("HEAD CANDIDATES<<<"); |
| 223 // those last seen revisions from the branch that had no children from the same branch are heads. |
207 for (String bn : branchLastSeen.keySet()) { | 224 for (String bn : branchLastSeen.keySet()) { |
| 225 // these are inactive branches? - there were children, but not from the same branch? |
208 branchHeads.get(bn).add(branchLastSeen.get(bn)); | 226 branchHeads.get(bn).add(branchLastSeen.get(bn)); |
209 } | 227 } |
210 for (String bn : branchStart.keySet()) { | 228 for (String bn : branchStart.keySet()) { |
211 BranchInfo bi = branches.get(bn); | 229 BranchInfo bi = branches.get(bn); |
212 if (bi != null) { | 230 if (bi != null) { |
227 } | 245 } |
228 } | 246 } |
229 } // else - oldHead still head for the branch | 247 } // else - oldHead still head for the branch |
230 } | 248 } |
231 heads.addAll(branchHeads.get(bn)); | 249 heads.addAll(branchHeads.get(bn)); |
232 bi = new BranchInfo(bn, bi.getStart(), heads.toArray(new Nodeid[0]), bi.isClosed() && closedBranches.contains(bn)); | 250 bi = new BranchInfo(bn, bi.getStart(), heads.toArray(new Nodeid[0])); |
233 } else { | 251 } else { |
234 Nodeid[] heads = branchHeads.get(bn).toArray(new Nodeid[0]); | 252 Nodeid[] heads = branchHeads.get(bn).toArray(new Nodeid[0]); |
235 bi = new BranchInfo(bn, branchStart.get(bn), heads, closedBranches.contains(bn)); | 253 bi = new BranchInfo(bn, branchStart.get(bn), heads); |
236 } | 254 } |
237 branches.put(bn, bi); | 255 branches.put(bn, bi); |
238 } | 256 } |
| 257 } |
| 258 final HgChangelog clog = repo.getChangelog(); |
| 259 final HgChangelog.RevisionMap rmap = clog.new RevisionMap().init(); |
| 260 for (BranchInfo bi : branches.values()) { |
| 261 bi.validate(clog, rmap); |
239 } | 262 } |
240 ps.done(); | 263 ps.done(); |
241 } | 264 } |
242 | 265 |
243 public List<BranchInfo> getAllBranches() { | 266 public List<BranchInfo> getAllBranches() { |
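
With closed status now recomputed in validate(), callers can filter branches through BranchInfo.isClosed(). A hypothetical usage sketch follows; HgRepository#getBranches() and the Nodeid import path are assumptions about the surrounding library and are not part of this diff.

```java
import java.util.List;

import org.tmatesoft.hg.core.Nodeid;      // assumed package for Nodeid
import org.tmatesoft.hg.repo.HgBranches;
import org.tmatesoft.hg.repo.HgRepository;

// Hypothetical caller: list branches that still have at least one open head.
public class OpenBranchesReport {

    static void printOpenBranches(HgRepository repo) {
        // getBranches() is assumed to hand out the populated HgBranches shown in this file
        for (HgBranches.BranchInfo bi : repo.getBranches().getAllBranches()) {
            if (bi.isClosed()) {
                continue; // every head of this branch carries the close=1 extra
            }
            List<Nodeid> heads = bi.getHeads(); // tipmost open head comes first, per validate()
            System.out.println(bi.getName() + ": " + heads);
        }
    }
}
```
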
293 return new File(repo.getRepositoryRoot(), "cache/branchheads"); | 316 return new File(repo.getRepositoryRoot(), "cache/branchheads"); |
294 } | 317 } |
295 | 318 |
296 public static class BranchInfo { | 319 public static class BranchInfo { |
297 private final String name; | 320 private final String name; |
298 private final List<Nodeid> heads; | 321 private List<Nodeid> heads; |
299 private final boolean closed; | 322 private boolean closed; |
300 private final Nodeid start; | 323 private final Nodeid start; |
301 | 324 |
302 // XXX in fact, few but not all branchHeads might be closed, and isClosed for whole branch is not | 325 // XXX in fact, few but not all branchHeads might be closed, and isClosed for whole branch is not |
303 // possible to determine. | 326 // possible to determine. |
304 BranchInfo(String branchName, Nodeid first, Nodeid[] branchHeads, boolean isClosed) { | 327 BranchInfo(String branchName, Nodeid first, Nodeid[] branchHeads) { |
305 name = branchName; | 328 name = branchName; |
306 start = first; | 329 start = first; |
307 heads = Collections.unmodifiableList(new ArrayList<Nodeid>(Arrays.asList(branchHeads))); | 330 heads = Arrays.asList(branchHeads); |
308 closed = isClosed; | |
309 } | 331 } |
310 | 332 |
311 // incomplete branch, there's not enough information at the time of creation. shall be replaced with | 333 // incomplete branch, there's not enough information at the time of creation. shall be replaced with |
312 // proper BI in #collect() | 334 // proper BI in #collect() |
313 BranchInfo(String branchName, Nodeid[] branchHeads) { | 335 BranchInfo(String branchName, Nodeid[] branchHeads) { |
314 this(branchName, Nodeid.NULL, branchHeads, false); | 336 this(branchName, Nodeid.NULL, branchHeads); |
| 337 } |
| 338 |
| 339 void validate(HgChangelog clog, HgChangelog.RevisionMap rmap) { |
| 340 int[] localCset = new int[heads.size()]; |
| 341 int i = 0; |
| 342 for (Nodeid h : heads) { |
| 343 localCset[i++] = rmap.localRevision(h); |
| 344 } |
| 345 // [0] tipmost, [1] tipmost open |
| 346 final Nodeid[] tipmost = new Nodeid[] {null, null}; |
| 347 final boolean[] allClosed = new boolean[] { true }; |
| 348 clog.range(new HgChangelog.Inspector() { |
| 349 |
| 350 public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) { |
| 351 assert heads.contains(nodeid); |
| 352 tipmost[0] = nodeid; |
| 353 if (!"1".equals(cset.extras().get("close"))) { |
| 354 tipmost[1] = nodeid; |
| 355 allClosed[0] = false; |
| 356 } |
| 357 } |
| 358 }, localCset); |
| 359 closed = allClosed[0]; |
| 360 Nodeid[] outcome = new Nodeid[localCset.length]; |
| 361 i = 0; |
| 362 if (!closed && tipmost[1] != null) { |
| 363 outcome[i++] = tipmost[1]; |
| 364 if (i < outcome.length && !tipmost[0].equals(tipmost[1])) { |
| 365 outcome[i++] = tipmost[0]; |
| 366 } |
| 367 } else { |
| 368 outcome[i++] = tipmost[0]; |
| 369 } |
| 370 for (Nodeid h : heads) { |
| 371 if (!h.equals(tipmost[0]) && !h.equals(tipmost[1])) { |
| 372 outcome[i++] = h; |
| 373 } |
| 374 } |
| 375 heads = Arrays.asList(outcome); |
315 } | 376 } |
316 | 377 |
317 public String getName() { | 378 public String getName() { |
318 return name; | 379 return name; |
319 } | 380 } |
320 /*public*/ boolean isClosed() { | 381 /** |
| 382 * @return <code>true</code> if all heads of this branch are marked as closed |
| 383 */ |
| 384 public boolean isClosed() { |
321 return closed; | 385 return closed; |
322 } | 386 } |
323 public List<Nodeid> getHeads() { | 387 public List<Nodeid> getHeads() { |
324 return heads; | 388 return heads; |
325 } | 389 } |
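
For reference, the closed test in validate() above keys off the changeset extra that `hg commit --close-branch` records, and a branch is reported closed only when every one of its heads carries it. The helper below merely restates that single check; the import location of RawChangeset is an assumption, while its extras() accessor is taken from the usage in this file.

```java
import org.tmatesoft.hg.repo.HgChangelog.RawChangeset; // assumed import location

// A head counts as closed when its changeset carries the "close" extra set to "1",
// which is what `hg commit --close-branch` writes.
final class ClosedHeadCheck {

    static boolean isClosedHead(RawChangeset cset) {
        return "1".equals(cset.extras().get("close"));
    }
}
```
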