comparison src/org/tmatesoft/hg/repo/HgWorkingCopyStatusCollector.java @ 94:af1f3b78b918
*StatusCollector renamed to Hg*StatusCollector
author   | Artem Tikhomirov <tikhomirov.artem@gmail.com>
date     | Thu, 27 Jan 2011 21:18:47 +0100
parents  | src/org/tmatesoft/hg/repo/WorkingCopyStatusCollector.java@d55d4eedfc57
children | a3a2e5deb320
comparing 93:d55d4eedfc57 with 94:af1f3b78b918
/*
 * Copyright (c) 2011 TMate Software Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * For information on how to redistribute this software under
 * the terms of a license other than GNU General Public License
 * contact TMate Software at support@svnkit.com
 */
package org.tmatesoft.hg.repo;

import static org.tmatesoft.hg.repo.HgRepository.BAD_REVISION;
import static org.tmatesoft.hg.repo.HgRepository.TIP;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Collections;
import java.util.Set;
import java.util.TreeSet;

import org.tmatesoft.hg.core.Nodeid;
import org.tmatesoft.hg.repo.HgStatusCollector.ManifestRevisionInspector;
import org.tmatesoft.hg.util.FileWalker;
import org.tmatesoft.hg.util.PathPool;
import org.tmatesoft.hg.util.PathRewrite;

/**
 *
 * @author Artem Tikhomirov
 * @author TMate Software Ltd.
 */
public class HgWorkingCopyStatusCollector {

    private final HgRepository repo;
    private final FileWalker repoWalker;
    private HgDirstate dirstate;
    private HgStatusCollector baseRevisionCollector;
    private PathPool pathPool;

    public HgWorkingCopyStatusCollector(HgRepository hgRepo) {
        this(hgRepo, hgRepo.createWorkingDirWalker());
    }

    HgWorkingCopyStatusCollector(HgRepository hgRepo, FileWalker hgRepoWalker) {
        this.repo = hgRepo;
        this.repoWalker = hgRepoWalker;
    }

    /**
     * Optionally, supply a collector instance that may cache (or have already cached) base revision
     * @param sc may be null
     */
    public void setBaseRevisionCollector(HgStatusCollector sc) {
        baseRevisionCollector = sc;
    }
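
    // Illustrative note, not part of this revision: sharing a single HgStatusCollector between
    // repository-level status and working copy status lets the base revision manifest and path pool
    // be reused (see getPathPool() and walk() below), e.g. (hypothetical usage):
    //   HgStatusCollector shared = new HgStatusCollector(hgRepo);
    //   HgWorkingCopyStatusCollector wcsc = new HgWorkingCopyStatusCollector(hgRepo);
    //   wcsc.setBaseRevisionCollector(shared);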

    /*package-local*/ PathPool getPathPool() {
        if (pathPool == null) {
            if (baseRevisionCollector == null) {
                pathPool = new PathPool(new PathRewrite.Empty());
            } else {
                return baseRevisionCollector.getPathPool();
            }
        }
        return pathPool;
    }

    public void setPathPool(PathPool pathPool) {
        this.pathPool = pathPool;
    }


    private HgDirstate getDirstate() {
        if (dirstate == null) {
            dirstate = repo.loadDirstate();
        }
        return dirstate;
    }

    // may be invoked a few times
    public void walk(int baseRevision, HgStatusInspector inspector) {
        final HgIgnore hgIgnore = repo.getIgnore();
        TreeSet<String> knownEntries = getDirstate().all();
        final boolean isTipBase;
        if (baseRevision == TIP) {
            baseRevision = repo.getManifest().getRevisionCount() - 1;
            isTipBase = true;
        } else {
            isTipBase = baseRevision == repo.getManifest().getRevisionCount() - 1;
        }
        HgStatusCollector.ManifestRevisionInspector collect = null;
        Set<String> baseRevFiles = Collections.emptySet();
        if (!isTipBase) {
            if (baseRevisionCollector != null) {
                collect = baseRevisionCollector.raw(baseRevision);
            } else {
                collect = new HgStatusCollector.ManifestRevisionInspector();
                repo.getManifest().walk(baseRevision, baseRevision, collect);
            }
            baseRevFiles = new TreeSet<String>(collect.files());
        }
        if (inspector instanceof HgStatusCollector.Record) {
            HgStatusCollector sc = baseRevisionCollector == null ? new HgStatusCollector(repo) : baseRevisionCollector;
            ((HgStatusCollector.Record) inspector).init(baseRevision, BAD_REVISION, sc);
        }
        repoWalker.reset();
        final PathPool pp = getPathPool();
        while (repoWalker.hasNext()) {
            repoWalker.next();
            String fname = repoWalker.name();
            File f = repoWalker.file();
            if (hgIgnore.isIgnored(fname)) {
                inspector.ignored(pp.path(fname));
            } else if (knownEntries.remove(fname)) {
                // modified, added, removed, clean
                if (collect != null) { // need to check against base revision, not FS file
                    checkLocalStatusAgainstBaseRevision(baseRevFiles, collect, baseRevision, fname, f, inspector);
                    baseRevFiles.remove(fname);
                } else {
                    checkLocalStatusAgainstFile(fname, f, inspector);
                }
            } else {
                inspector.unknown(pp.path(fname));
            }
        }
        if (collect != null) {
            for (String r : baseRevFiles) {
                inspector.removed(pp.path(r));
            }
        }
        for (String m : knownEntries) {
            // known file missing from the working dir
            if (getDirstate().checkRemoved(m) == null) {
                // not removed from the repository = 'deleted'
                inspector.missing(pp.path(m));
            } else {
                // removed from the repo
                // if we check against a non-tip revision, do not report files that were added past that revision and are now removed
                if (collect == null || baseRevFiles.contains(m)) {
                    inspector.removed(pp.path(m));
                }
            }
        }
    }

    public HgStatusCollector.Record status(int baseRevision) {
        HgStatusCollector.Record rv = new HgStatusCollector.Record();
        walk(baseRevision, rv);
        return rv;
    }

    //********************************************


    private void checkLocalStatusAgainstFile(String fname, File f, HgStatusInspector inspector) {
        HgDirstate.Record r;
        if ((r = getDirstate().checkNormal(fname)) != null) {
            // either clean or modified
            if (f.lastModified() / 1000 == r.time && r.size == f.length()) {
                inspector.clean(getPathPool().path(fname));
            } else {
                // FIXME check actual content to avoid false modified files
                inspector.modified(getPathPool().path(fname));
            }
        } else if ((r = getDirstate().checkAdded(fname)) != null) {
            if (r.name2 == null) {
                inspector.added(getPathPool().path(fname));
            } else {
                inspector.copied(getPathPool().path(r.name2), getPathPool().path(fname));
            }
        } else if ((r = getDirstate().checkRemoved(fname)) != null) {
            inspector.removed(getPathPool().path(fname));
        } else if ((r = getDirstate().checkMerged(fname)) != null) {
            inspector.modified(getPathPool().path(fname));
        }
    }

    // XXX refactor checkLocalStatus methods in a more OO way
    private void checkLocalStatusAgainstBaseRevision(Set<String> baseRevNames, ManifestRevisionInspector collect, int baseRevision, String fname, File f, HgStatusInspector inspector) {
        // fname is in the dirstate, either Normal, Added, Removed or Merged
        Nodeid nid1 = collect.nodeid(fname);
        String flags = collect.flags(fname);
        HgDirstate.Record r;
        if (nid1 == null) {
            // normal: added?
            // added: not known at the time of baseRevision, shall report
            // merged: was not known, report as added?
            if ((r = getDirstate().checkNormal(fname)) != null) {
                String origin = HgStatusCollector.getOriginIfCopy(repo, fname, baseRevNames, baseRevision);
                if (origin != null) {
                    inspector.copied(getPathPool().path(origin), getPathPool().path(fname));
                    return;
                }
            } else if ((r = getDirstate().checkAdded(fname)) != null) {
                if (r.name2 != null && baseRevNames.contains(r.name2)) {
                    baseRevNames.remove(r.name2); // XXX surely I shall not report rename source as Removed?
                    inspector.copied(getPathPool().path(r.name2), getPathPool().path(fname));
                    return;
                }
                // fall-through, report as added
            } else if (getDirstate().checkRemoved(fname) != null) {
                // removed: the removed file was not known at the time of baseRevision, so we should not report it as removed
                return;
            }
            inspector.added(getPathPool().path(fname));
        } else {
            // was known; check whether clean or modified
            // when added - seems to be the case of a file added once again, hence need to check if content is different
            if ((r = getDirstate().checkNormal(fname)) != null || (r = getDirstate().checkMerged(fname)) != null || (r = getDirstate().checkAdded(fname)) != null) {
                // either clean or modified
                HgDataFile fileNode = repo.getFileNode(fname);
                final int lengthAtRevision = fileNode.length(nid1);
                if (r.size /* XXX File.length() ?! */ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) {
                    inspector.modified(getPathPool().path(fname));
                } else {
                    // check actual content to see actual changes
                    // XXX consider adding HgDataFile.compare(File/byte[]/whatever) operation to optimize comparison
                    if (areTheSame(f, fileNode.content(nid1))) {
                        inspector.clean(getPathPool().path(fname));
                    } else {
                        inspector.modified(getPathPool().path(fname));
                    }
                }
            }
            // only those left in baseRevNames after processing are reported as removed
        }

        // TODO think over whether content comparison may be done more effectively, e.g. by calculating a nodeid for the local file and comparing it with the nodeid from the manifest.
        // We don't need to tell the exact difference; a hash should be enough to detect a difference, it doesn't involve reading historical file content, and it's relatively
        // cheap to calculate a hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the following approach is used for nodeids:
        //   changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest,
        // then it's sufficient to check the parents from the dirstate: if they do not match the parents from the file's baseRevision, the nodeids are different (non-matching parents mean different nodeids).
        // The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean'.
    }
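
    // Sketch only, not part of this revision: the TODO above proposes comparing a nodeid computed
    // from the local file with the entry recorded in the base revision manifest, instead of reading
    // historical content. Mercurial derives a revlog entry's nodeid as SHA-1 over
    // min(p1, p2) + max(p1, p2) + revision text, where p1/p2 are the 20-byte binary parent nodeids
    // ordered by unsigned byte-wise comparison; note that filelog text may carry copy/rename
    // metadata prepended to the content, which this minimal illustration deliberately ignores.
    private static byte[] todoNodeidSketch(byte[] parent1, byte[] parent2, byte[] content) throws java.security.NoSuchAlgorithmException {
        byte[] lo = parent1, hi = parent2;
        for (int i = 0; i < lo.length && i < hi.length; i++) {
            int d = (lo[i] & 0xFF) - (hi[i] & 0xFF);
            if (d > 0) {
                // parent2 sorts first
                lo = parent2;
                hi = parent1;
            }
            if (d != 0) {
                break; // first differing byte decides the ordering
            }
        }
        java.security.MessageDigest sha1 = java.security.MessageDigest.getInstance("SHA-1");
        sha1.update(lo);
        sha1.update(hi);
        sha1.update(content);
        return sha1.digest(); // 20 bytes, comparable with the Nodeid kept in the manifest
    }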

    private static String todoGenerateFlags(String fname) {
        // FIXME implement
        return null;
    }

    private static boolean areTheSame(File f, byte[] data) {
        try {
            BufferedInputStream is = new BufferedInputStream(new FileInputStream(f));
            int i = 0;
            while (i < data.length && data[i] == is.read()) {
                i++; // increment only on a successful match, otherwise we couldn't tell whether the last byte of data matched the byte read from the stream
            }
            return i == data.length && is.read() == -1; // although the data length is expected to be the same (see caller), check that we reached EOF and no more data is left
        } catch (IOException ex) {
            ex.printStackTrace(); // log warn
        }
        return false;
    }

}
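
For context, a minimal usage sketch of the collector defined above (hypothetical driver code; it assumes an HgRepository instance is obtained elsewhere, other class and method names are as in this revision):

    HgRepository hgRepo = ...; // opened/located by whatever repository lookup the library provides
    HgWorkingCopyStatusCollector wcsc = new HgWorkingCopyStatusCollector(hgRepo);
    // compare the working directory against the tip revision, collecting everything into a Record
    HgStatusCollector.Record record = wcsc.status(HgRepository.TIP);
    // alternatively, stream results into any HgStatusInspector implementation via walk()
    wcsc.walk(HgRepository.TIP, new HgStatusCollector.Record());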