jhg: comparison of src/org/tmatesoft/hg/repo/HgWorkingCopyStatusCollector.java @ 157:d5268ca7715b
Merged branch wrap-data-access into default for resource-friendly data access. Updated API to promote that friendliness to clients (channels, not byte[]). More exceptions
author   | Artem Tikhomirov <tikhomirov.artem@gmail.com>
date     | Wed, 09 Mar 2011 05:22:17 +0100
parents  | 1a7a9a20e1f9
children | 42fe9a94b9d0
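
The commit message's "channels, not byte[]" refers to pushing file content through a ByteChannel sink instead of returning raw arrays; the hunks below move the working-copy status checks onto that style. As a rough client-side sketch only, mirroring the calls that appear in this diff (repo.getFileNode(), HgDataFile.content(int, sink), ByteArrayChannel.toArray()) and assuming they keep the signatures implied here:

```java
import java.io.IOException;

import org.tmatesoft.hg.core.HgException;
import org.tmatesoft.hg.internal.ByteArrayChannel;
import org.tmatesoft.hg.repo.HgDataFile;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.util.CancelledException;

// Sketch: read one revision of a tracked file through the channel-based API,
// the same way the new areTheSame(File, HgDataFile, int) helper in this diff does.
class ChannelReadSketch {
    static byte[] read(HgRepository repo, String fname) throws HgException, IOException {
        HgDataFile df = repo.getFileNode(fname);        // as used in the patched status check
        ByteArrayChannel sink = new ByteArrayChannel(); // a ByteChannel that simply buffers
        try {
            df.content(HgRepository.TIP, sink);         // content flows into the sink, no byte[] result
        } catch (CancelledException ex) {
            // ByteArrayChannel is not cancellable, per the comment in the new helper
        }
        return sink.toArray();
    }
}
```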
--- src/org/tmatesoft/hg/repo/HgWorkingCopyStatusCollector.java	(156:643ddec3be36)
+++ src/org/tmatesoft/hg/repo/HgWorkingCopyStatusCollector.java	(157:d5268ca7715b)
@@ -28,14 +28,18 @@
 import java.nio.channels.FileChannel;
 import java.util.Collections;
 import java.util.Set;
 import java.util.TreeSet;
 
+import org.tmatesoft.hg.core.HgDataStreamException;
+import org.tmatesoft.hg.core.HgException;
 import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.ByteArrayChannel;
 import org.tmatesoft.hg.internal.FilterByteChannel;
 import org.tmatesoft.hg.repo.HgStatusCollector.ManifestRevisionInspector;
 import org.tmatesoft.hg.util.ByteChannel;
+import org.tmatesoft.hg.util.CancelledException;
 import org.tmatesoft.hg.util.FileIterator;
 import org.tmatesoft.hg.util.Path;
 import org.tmatesoft.hg.util.PathPool;
 import org.tmatesoft.hg.util.PathRewrite;
 
@@ -174,11 +178,11 @@
         if (f.lastModified() / 1000 == r.time && r.size == f.length()) {
             inspector.clean(getPathPool().path(fname));
         } else {
             // check actual content to avoid false modified files
             HgDataFile df = repo.getFileNode(fname);
-            if (!areTheSame(f, df.content(), df.getPath())) {
+            if (!areTheSame(f, df, HgRepository.TIP)) {
                 inspector.modified(df.getPath());
             }
         }
     } else if ((r = getDirstate().checkAdded(fname)) != null) {
         if (r.name2 == null) {
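
A note on the fast path in the hunk above: the recorded mtime/size in dirstate can go stale without a real change (for example, a file rewritten with identical bytes gets a new timestamp), which is the "false modified" case the comment mentions; only the byte-level comparison can then prove the file is still clean. A minimal, self-contained illustration using plain java.io (the file name is hypothetical):

```java
import java.io.File;

// Demonstrates how a timestamp-only check misfires: touching a file changes mtime
// while size and content stay the same, so only a content comparison keeps it "clean".
public class FalseModifiedDemo {
    public static void main(String[] args) {
        File f = new File("src/a.txt");                   // hypothetical tracked file
        long recordedTime = f.lastModified() / 1000;      // what dirstate would have remembered
        long recordedSize = f.length();
        f.setLastModified(System.currentTimeMillis() + 60000L); // "touch": bytes untouched
        boolean fastPathClean = f.lastModified() / 1000 == recordedTime && f.length() == recordedSize;
        System.out.println("fast path still clean? " + fastPathClean); // false, despite identical content
    }
}
```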
@@ -202,14 +206,19 @@
         if (nid1 == null) {
             // normal: added?
             // added: not known at the time of baseRevision, shall report
             // merged: was not known, report as added?
             if ((r = getDirstate().checkNormal(fname)) != null) {
-                Path origin = HgStatusCollector.getOriginIfCopy(repo, fname, baseRevNames, baseRevision);
-                if (origin != null) {
-                    inspector.copied(getPathPool().path(origin), getPathPool().path(fname));
-                    return;
+                try {
+                    Path origin = HgStatusCollector.getOriginIfCopy(repo, fname, baseRevNames, baseRevision);
+                    if (origin != null) {
+                        inspector.copied(getPathPool().path(origin), getPathPool().path(fname));
+                        return;
+                    }
+                } catch (HgDataStreamException ex) {
+                    ex.printStackTrace();
+                    // FIXME report to a mediator, continue status collection
                 }
             } else if ((r = getDirstate().checkAdded(fname)) != null) {
                 if (r.name2 != null && baseRevNames.contains(r.name2)) {
                     baseRevNames.remove(r.name2); // XXX surely I shall not report rename source as Removed?
                     inspector.copied(getPathPool().path(r.name2), getPathPool().path(fname));
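
The new catch block in the hunk above only prints the stack trace; the FIXME says such failures should instead go to some mediator so that status collection can continue for the remaining files. Nothing like that exists in this patch; purely as a hypothetical sketch of the direction the comment points at (the interface name and method are invented):

```java
import org.tmatesoft.hg.core.HgDataStreamException;
import org.tmatesoft.hg.util.Path;

// Hypothetical "mediator" callback, not part of the patch or the library API.
// The collector would report the failure here and keep walking the remaining files,
// e.g. in the catch block: mediator.copyOriginFailed(getPathPool().path(fname), ex);
interface StatusErrorMediator {
    void copyOriginFailed(Path file, HgDataStreamException cause);
}
```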
@@ -230,12 +239,11 @@
             final int lengthAtRevision = fileNode.length(nid1);
             if (r.size /* XXX File.length() ?! */ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) {
                 inspector.modified(getPathPool().path(fname));
             } else {
                 // check actual content to see actual changes
-                // XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison
-                if (areTheSame(f, fileNode.content(nid1), fileNode.getPath())) {
+                if (areTheSame(f, fileNode, fileNode.getLocalRevision(nid1))) {
                     inspector.clean(getPathPool().path(fname));
                 } else {
                     inspector.modified(getPathPool().path(fname));
                 }
             }
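
The XXX comment removed from the hunk above (and kept inside the new helper further down) notes that a dedicated compare operation could avoid materializing a whole revision just to check equality: as written, the new areTheSame(File, HgDataFile, int) buffers everything into a ByteArrayChannel first. Purely as a hypothetical sketch of such an optimization, assuming ByteChannel is the single-method write(ByteBuffer) sink that the catch clauses in this diff imply (IOException and CancelledException):

```java
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

import org.tmatesoft.hg.util.ByteChannel;
import org.tmatesoft.hg.util.CancelledException;

// Hypothetical comparing sink: matches revision content against the working-copy file
// as it streams by, so no full byte[] copy of the revision is ever built.
// A complete version would also verify the file has no trailing bytes after the last chunk.
class ComparingChannel implements ByteChannel {
    private final FileChannel file;
    private boolean equalSoFar = true;

    ComparingChannel(RandomAccessFile workingCopyFile) {
        file = workingCopyFile.getChannel();
    }

    public int write(ByteBuffer revisionChunk) throws IOException, CancelledException {
        int consumed = revisionChunk.remaining();
        ByteBuffer fileChunk = ByteBuffer.allocate(consumed);
        while (fileChunk.hasRemaining() && file.read(fileChunk) != -1) {
            // keep reading until we have as many working-copy bytes as revision bytes, or hit EOF
        }
        fileChunk.flip();
        if (!fileChunk.equals(revisionChunk)) {
            equalSoFar = false; // shorter file or differing bytes
        }
        revisionChunk.position(revisionChunk.limit()); // mark the chunk as fully consumed
        return consumed;
    }

    boolean matches() {
        return equalSoFar;
    }
}
```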
@@ -249,10 +257,28 @@
         // changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest
         // then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids).
         // The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean'
     }
 
+    private boolean areTheSame(File f, HgDataFile dataFile, int localRevision) {
+        // XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison
+        ByteArrayChannel bac = new ByteArrayChannel();
+        boolean ioFailed = false;
+        try {
+            // need content with metadata striped off - although theoretically chances are metadata may be different,
+            // WC doesn't have it anyway
+            dataFile.content(localRevision, bac);
+        } catch (CancelledException ex) {
+            // silently ignore - can't happen, ByteArrayChannel is not cancellable
+        } catch (IOException ex) {
+            ioFailed = true;
+        } catch (HgException ex) {
+            ioFailed = true;
+        }
+        return !ioFailed && areTheSame(f, bac.toArray(), dataFile.getPath());
+    }
+
     private boolean areTheSame(File f, final byte[] data, Path p) {
         FileInputStream fis = null;
         try {
             try {
                 fis = new FileInputStream(f);