comparison src/org/tmatesoft/hg/repo/HgBranches.java @ 656:a937e63b6e02

Performance: rebuilding branch information takes too long (improvement: roughly 3x, from 11-15 s to under 4 s)
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Thu, 04 Jul 2013 18:40:03 +0200
parents 12a4f60ea972
children 6334b0267103
comparison: 655:bcbcc318f250 → 656:a937e63b6e02
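The core of the speed-up, visible in the main hunk below, is a simpler head computation: instead of maintaining branchLastSeen and branchHeadCandidates per branch, the new code walks changesets in changelog order, treats every revision as the current best head candidate for its branch, and immediately retires its parents from that branch's head list. The following is a minimal standalone sketch of that idea only; the Cset record and computeHeads helper are illustrative names, not hg4j API.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

// Simplified illustration: iterate revisions in changelog (topological) order;
// each revision becomes a head of its branch, and its parents stop being heads.
// One pass, no head-candidate bookkeeping.
public class BranchHeadsSketch {

	// Hypothetical stand-in for a changeset: id, branch name and parent ids.
	static final class Cset {
		final String id;
		final String branch;
		final List<String> parents;
		Cset(String id, String branch, List<String> parents) {
			this.id = id;
			this.branch = branch;
			this.parents = parents;
		}
	}

	static Map<String, List<String>> computeHeads(List<Cset> changelogInOrder) {
		Map<String, List<String>> branchHeads = new HashMap<String, List<String>>();
		for (Cset cset : changelogInOrder) {
			List<String> heads = branchHeads.get(cset.branch);
			if (heads == null) {
				branchHeads.put(cset.branch, heads = new LinkedList<String>());
			}
			// so far the present node is the best candidate for a head
			heads.add(cset.id);
			// its parents, however, cease to be heads (if they are on this branch)
			heads.removeAll(cset.parents);
		}
		return branchHeads;
	}

	public static void main(String[] args) {
		// tiny history with a fork: r0 <- r1 <- r2 and r1 <- r3, all on "default"
		List<Cset> log = new ArrayList<Cset>();
		log.add(new Cset("r0", "default", new ArrayList<String>()));
		log.add(new Cset("r1", "default", Arrays.asList("r0")));
		log.add(new Cset("r2", "default", Arrays.asList("r1")));
		log.add(new Cset("r3", "default", Arrays.asList("r1")));
		System.out.println(computeHeads(log)); // {default=[r2, r3]}
	}
}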
@@ -25,12 +25,12 @@
 import java.io.FileReader;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
@@ -119,11 +119,11 @@
 		} finally {
 			new FileUtils(repo.getSessionContext().getLog(), this).closeQuietly(br);
 		}
 		return -1; // deliberately not lastInCache, to avoid anything but -1 when 1st line was read and there's error is in lines 2..end
 	}
 
 	void collect(final ProgressSupport ps) throws HgRuntimeException {
 		branches.clear();
 		final HgRepository repo = internalRepo.getRepo();
 		ps.start(1 + repo.getChangelog().getRevisionCount() * 2);
 		//
@@ -131,95 +131,72 @@
 		isCacheActual = lastCached == repo.getChangelog().getLastRevision();
 		if (!isCacheActual) {
 			final HgParentChildMap<HgChangelog> pw = new HgParentChildMap<HgChangelog>(repo.getChangelog());
 			pw.init();
 			ps.worked(repo.getChangelog().getRevisionCount());
+			//
 			// first revision branch found at
 			final HashMap<String, Nodeid> branchStart = new HashMap<String, Nodeid>();
-			// last revision seen for the branch
-			final HashMap<String, Nodeid> branchLastSeen = new HashMap<String, Nodeid>();
 			// revisions from the branch that have no children at all
 			final HashMap<String, List<Nodeid>> branchHeads = new HashMap<String, List<Nodeid>>();
-			// revisions that are immediate children of a node from a given branch
-			// after iteration, there are some revisions left in this map (children of a branch last revision
-			// that doesn't belong to the branch. No use of this now, perhaps can deduce isInactive (e.g.those
-			// branches that have non-empty candidates are inactive if all their heads are roots for those left)
-			final HashMap<String, List<Nodeid>> branchHeadCandidates = new HashMap<String, List<Nodeid>>();
 			HgChangelog.Inspector insp = new HgChangelog.Inspector() {
+
+				private final ArrayList<Nodeid> parents = new ArrayList<Nodeid>(3);
 
 				public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) {
 					String branchName = cset.branch();
+					List<Nodeid> _branchHeads;
+					// there are chances (with --force key) branch can get more than one start
+					// revision. Neither BranchInfo nor this code support this scenario at the moment.
 					if (!branchStart.containsKey(branchName)) {
 						branchStart.put(branchName, nodeid);
-						branchHeads.put(branchName, new LinkedList<Nodeid>());
-						branchHeadCandidates.put(branchName, new LinkedList<Nodeid>());
+						branchHeads.put(branchName, _branchHeads = new LinkedList<Nodeid>());
					} else {
-						final List<Nodeid> headCandidates = branchHeadCandidates.get(branchName);
-						if (headCandidates.remove(nodeid)) {
-							// likely we don't need to keep parent anymore, as we found at least 1 child thereof to be at the same branch
-							// however, it's possible the child we found is a result of an earlier fork, and revision in the
-							// branchLastSeen is 'parallel' head, which needs to be kept
-							Nodeid lastSeenInBranch = branchLastSeen.get(branchName);
-							// check if current revision is on descendant line. Seems direct parents check is enough
-							if (pw.safeFirstParent(nodeid).equals(lastSeenInBranch) || pw.safeSecondParent(nodeid).equals(lastSeenInBranch)) {
-								branchLastSeen.remove(branchName);
-							}
-						}
+						_branchHeads = branchHeads.get(branchName);
+						if (_branchHeads == null) {
+							branchHeads.put(branchName, _branchHeads = new LinkedList<Nodeid>());
+						}
 					}
-					List<Nodeid> immediateChildren = pw.directChildren(nodeid);
-					if (immediateChildren.size() > 0) {
-						// 1) children may be in another branch
-						// and unless we later came across another element from this branch,
-						// we need to record all these as potential heads
-						//
-						// 2) head1 with children in different branch, and head2 in this branch without children
-						branchLastSeen.put(branchName, nodeid);
-						branchHeadCandidates.get(branchName).addAll(immediateChildren);
-					} else {
-						// no more children known for this node, it's (one of the) head of the branch
-						branchHeads.get(branchName).add(nodeid);
-					}
+					// so far present node is the best candidate for head
+					_branchHeads.add(nodeid);
+					parents.clear();
+					// parents of this node, however, cease to be heads (if they are from this branch)
+					pw.appendParentsOf(nodeid, parents);
+					_branchHeads.removeAll(parents);
 					ps.worked(1);
 				}
 			};
+			// XXX alternatively may iterate with pw.all().subList(lastCached)
+			// but need an effective way to find out branch of particular changeset
 			repo.getChangelog().range(lastCached == -1 ? 0 : lastCached+1, HgRepository.TIP, insp);
-			// those last seen revisions from the branch that had no children from the same branch are heads.
-			for (String bn : branchLastSeen.keySet()) {
-				// these are inactive branches? - there were children, but not from the same branch?
-				branchHeads.get(bn).add(branchLastSeen.get(bn));
-			}
+			//
+			// build BranchInfo, based on found and cached
 			for (String bn : branchStart.keySet()) {
 				BranchInfo bi = branches.get(bn);
 				if (bi != null) {
-					// although heads from cache shall not intersect with heads after lastCached,
-					// use of LHS doesn't hurt (and makes sense e.g. if cache is not completely correct in my tests)
-					LinkedHashSet<Nodeid> heads = new LinkedHashSet<Nodeid>(bi.getHeads());
-					for (Nodeid oldHead : bi.getHeads()) {
-						// XXX perhaps, need pw.canReach(Nodeid from, Collection<Nodeid> to)
-						List<Nodeid> newChildren = pw.childrenOf(Collections.singletonList(oldHead));
-						if (!newChildren.isEmpty()) {
-							// likely not a head any longer,
-							// check if any new head can be reached from old one, and, if yes,
-							// do not consider that old head as head.
-							for (Nodeid newHead : branchHeads.get(bn)) {
-								if (newChildren.contains(newHead)) {
-									heads.remove(oldHead);
-									break;
-								}
-							}
-						} // else - oldHead still head for the branch
-					}
-					heads.addAll(branchHeads.get(bn));
-					bi = new BranchInfo(bn, bi.getStart(), heads.toArray(new Nodeid[0]));
+					// combine heads found so far with those cached
+					LinkedHashSet<Nodeid> oldHeads = new LinkedHashSet<Nodeid>(bi.getHeads());
+					// expect size of both oldHeads and newHeads sets to be small, and for x for hence acceptable.
+					for (Nodeid newHead : branchHeads.get(bn)) {
+						for (Iterator<Nodeid> it = oldHeads.iterator(); it.hasNext();) {
+							if (pw.isChild(it.next(), newHead)) {
+								it.remove();
+							}
+						}
+					}
+					oldHeads.addAll(branchHeads.get(bn));
+					assert oldHeads.size() > 0;
+					bi = new BranchInfo(bn, bi.getStart(), oldHeads.toArray(new Nodeid[oldHeads.size()]));
 				} else {
 					Nodeid[] heads = branchHeads.get(bn).toArray(new Nodeid[0]);
 					bi = new BranchInfo(bn, branchStart.get(bn), heads);
 				}
 				branches.put(bn, bi);
 			}
-		}
+		} // !cacheActual
 		final HgChangelog clog = repo.getChangelog();
+		/// FIXME use HgParentChildMap if available (need to decide how to get HgRevisionMap and HgParentChildMap to a common denominator)
 		final HgRevisionMap<HgChangelog> rmap = new HgRevisionMap<HgChangelog>(clog).init();
 		for (BranchInfo bi : branches.values()) {
 			bi.validate(clog, rmap);
 		}
 		repoChangeTracker.touch();
@@ -386,10 +363,11 @@
 		}
 		return closedHeads.contains(head);
 	}
 //	public Nodeid getTip() {
 //	}
+	// XXX Not public as there are chances for few possible branch starts, and I need to decide how to handle that
 	/*public*/ Nodeid getStart() {
 		// first node where branch appears
 		return start;
 	}
 }
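The second part of the rewritten collect() hunk merges heads already stored in the cache with heads found after lastCached: a cached head survives only if none of the newly found heads descends from it. Below is a rough sketch of that check only, assuming a hypothetical isChild(ancestor, descendant) predicate in place of HgParentChildMap.isChild and plain strings in place of Nodeid.

import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

// Hypothetical ancestry predicate standing in for HgParentChildMap.isChild().
interface Ancestry {
	boolean isChild(String ancestor, String descendant);
}

final class HeadMerge {
	// Drop every cached head that has a newly found head among its descendants,
	// then add the new heads; insertion order of the surviving cached heads is kept.
	static Set<String> merge(List<String> cachedHeads, List<String> newHeads, Ancestry dag) {
		Set<String> result = new LinkedHashSet<String>(cachedHeads);
		for (String newHead : newHeads) {
			for (Iterator<String> it = result.iterator(); it.hasNext();) {
				if (dag.isChild(it.next(), newHead)) {
					it.remove(); // cached head is an ancestor of a new head, no longer a head
				}
			}
		}
		result.addAll(newHeads);
		return result;
	}
}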