comparison src/com/tmate/hgkit/ll/LocalHgRepo.java @ 55:05829a70b30b

Status operation extracted into separate, cache-friendly class
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Mon, 17 Jan 2011 04:45:09 +0100
parents f1db8610da62
children 8b0d6f1bd6b4
comparison 54:fd4f2c98995b vs 55:05829a70b30b
--- src/com/tmate/hgkit/ll/LocalHgRepo.java  (54:fd4f2c98995b)
+++ src/com/tmate/hgkit/ll/LocalHgRepo.java  (55:05829a70b30b)
@@ -44,176 +44,137 @@
     @Override
     public String getLocation() {
         return repoLocation;
     }
 
-    @Override
-    public void status(int rev1, int rev2, final StatusInspector inspector) {
-        final ManifestRevisionCollector collect = new ManifestRevisionCollector();
-        getManifest().walk(rev1, rev1, collect);
-
-        HgManifest.Inspector compare = new HgManifest.Inspector() {
-
-            public boolean begin(int revision, Nodeid nid) {
-                return true;
-            }
-
-            public boolean next(Nodeid nid, String fname, String flags) {
-                Nodeid nidR1 = collect.idsMap.remove(fname);
-                String flagsR1 = collect.flagsMap.remove(fname);
-                if (nidR1 == null) {
-                    inspector.added(fname);
-                } else {
-                    if (nidR1.equals(nid) && ((flags == null && flagsR1 == null) || flags.equals(flagsR1))) {
-                        inspector.clean(fname);
-                    } else {
-                        inspector.modified(fname);
-                    }
-                }
-                return true;
-            }
-
-            public boolean end(int revision) {
-                for (String fname : collect.idsMap.keySet()) {
-                    inspector.removed(fname);
-                }
-                if (collect.idsMap.size() != collect.flagsMap.size()) {
-                    throw new IllegalStateException();
-                }
-                return false;
-            }
-        };
-        getManifest().walk(rev2, rev2, compare);
-    }
-
-    public void statusLocal(int baseRevision, StatusInspector inspector) {
-        LinkedList<File> folders = new LinkedList<File>();
-        final File rootDir = repoDir.getParentFile();
-        folders.add(rootDir);
-        final HgDirstate dirstate = loadDirstate();
-        final HgIgnore hgignore = loadIgnore();
-        TreeSet<String> knownEntries = dirstate.all();
-        final boolean isTipBase = baseRevision == TIP || baseRevision == getManifest().getRevisionCount();
-        final ManifestRevisionCollector collect = isTipBase ? null : new ManifestRevisionCollector();
-        if (!isTipBase) {
-            getManifest().walk(baseRevision, baseRevision, collect);
-        }
-        do {
-            File d = folders.removeFirst();
-            for (File f : d.listFiles()) {
-                if (f.isDirectory()) {
-                    if (!".hg".equals(f.getName())) {
-                        folders.addLast(f);
-                    }
-                } else {
-                    // FIXME path relative to rootDir - need more robust approach
-                    String fname = normalize(f.getPath().substring(rootDir.getPath().length() + 1));
-                    if (hgignore.isIgnored(fname)) {
-                        inspector.ignored(fname);
-                    } else {
-                        if (knownEntries.remove(fname)) {
-                            // modified, added, removed, clean
-                            if (collect != null) { // need to check against base revision, not FS file
-                                checkLocalStatusAgainstBaseRevision(collect, fname, f, dirstate, inspector);
-                            } else {
-                                checkLocalStatusAgainstFile(fname, f, dirstate, inspector);
-                            }
-                        } else {
-                            inspector.unknown(fname);
-                        }
-                    }
-                }
-            }
-        } while (!folders.isEmpty());
-        if (collect != null) {
-            for (String r : collect.idsMap.keySet()) {
-                inspector.removed(r);
-            }
-        }
-        for (String m : knownEntries) {
-            // removed from the repository and missing from working dir shall not be reported as 'deleted'
-            if (dirstate.checkRemoved(m) == null) {
-                inspector.missing(m);
-            }
-        }
-    }
-
-    private static void checkLocalStatusAgainstFile(String fname, File f, HgDirstate dirstate, StatusInspector inspector) {
-        HgDirstate.Record r;
-        if ((r = dirstate.checkNormal(fname)) != null) {
-            // either clean or modified
-            if (f.lastModified() / 1000 == r.time && r.size == f.length()) {
-                inspector.clean(fname);
-            } else {
-                // FIXME check actual content to avoid false modified files
-                inspector.modified(fname);
-            }
-        } else if ((r = dirstate.checkAdded(fname)) != null) {
-            if (r.name2 == null) {
-                inspector.added(fname);
-            } else {
-                inspector.copied(fname, r.name2);
-            }
-        } else if ((r = dirstate.checkRemoved(fname)) != null) {
-            inspector.removed(fname);
-        } else if ((r = dirstate.checkMerged(fname)) != null) {
-            inspector.modified(fname);
-        }
-    }
-
-    // XXX refactor checkLocalStatus methods in more OO way
-    private void checkLocalStatusAgainstBaseRevision(ManifestRevisionCollector collect, String fname, File f, HgDirstate dirstate, StatusInspector inspector) {
-        // fname is in the dirstate, either Normal, Added, Removed or Merged
-        Nodeid nid1 = collect.idsMap.remove(fname);
-        String flags = collect.flagsMap.remove(fname);
-        HgDirstate.Record r;
-        if (nid1 == null) {
-            // normal: added?
-            // added: not known at the time of baseRevision, shall report
-            // merged: was not known, report as added?
-            if ((r = dirstate.checkAdded(fname)) != null) {
-                if (r.name2 != null && collect.idsMap.containsKey(r.name2)) {
-                    collect.idsMap.remove(r.name2);
-                    collect.idsMap.remove(r.name2);
-                    inspector.copied(r.name2, fname);
-                    return;
-                }
-                // fall-through, report as added
-            } else if (dirstate.checkRemoved(fname) != null) {
-                // removed: removed file was not known at the time of baseRevision, and we should not report it as removed
-                return;
-            }
-            inspector.added(fname);
-        } else {
-            // was known; check whether clean or modified
-            // when added - seems to be the case of a file added once again, hence need to check if content is different
-            if ((r = dirstate.checkNormal(fname)) != null || (r = dirstate.checkMerged(fname)) != null || (r = dirstate.checkAdded(fname)) != null) {
-                // either clean or modified
-                HgDataFile fileNode = getFileNode(fname);
-                final int lengthAtRevision = fileNode.length(nid1);
-                if (r.size /* XXX File.length() ?! */ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) {
-                    inspector.modified(fname);
-                } else {
-                    // check actual content to see actual changes
-                    // XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison
-                    if (areTheSame(f, fileNode.content(nid1))) {
-                        inspector.clean(fname);
-                    } else {
-                        inspector.modified(fname);
-                    }
-                }
-            }
-            // only those left in idsMap after processing are reported as removed
-        }
-
-        // TODO think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest
-        // we don't need to tell exact difference, hash should be enough to detect difference, and it doesn't involve reading historical file content, and it's relatively
-        // cheap to calc hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the next approach is used for nodeids:
-        // changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest
-        // then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids).
-        // The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean'
-    }
+//    public void statusLocal(int baseRevision, StatusCollector.Inspector inspector) {
+//        LinkedList<File> folders = new LinkedList<File>();
+//        final File rootDir = repoDir.getParentFile();
+//        folders.add(rootDir);
+//        final HgDirstate dirstate = loadDirstate();
+//        final HgIgnore hgignore = loadIgnore();
+//        TreeSet<String> knownEntries = dirstate.all();
+//        final boolean isTipBase = baseRevision == TIP || baseRevision == getManifest().getRevisionCount();
+//        final StatusCollector.ManifestRevisionInspector collect = isTipBase ? null : new StatusCollector.ManifestRevisionInspector();
+//        if (!isTipBase) {
+//            getManifest().walk(baseRevision, baseRevision, collect);
+//        }
+//        do {
+//            File d = folders.removeFirst();
+//            for (File f : d.listFiles()) {
+//                if (f.isDirectory()) {
+//                    if (!".hg".equals(f.getName())) {
+//                        folders.addLast(f);
+//                    }
+//                } else {
+//                    // FIXME path relative to rootDir - need more robust approach
+//                    String fname = normalize(f.getPath().substring(rootDir.getPath().length() + 1));
+//                    if (hgignore.isIgnored(fname)) {
+//                        inspector.ignored(fname);
+//                    } else {
+//                        if (knownEntries.remove(fname)) {
+//                            // modified, added, removed, clean
+//                            if (collect != null) { // need to check against base revision, not FS file
+//                                checkLocalStatusAgainstBaseRevision(collect, fname, f, dirstate, inspector);
+//                            } else {
+//                                checkLocalStatusAgainstFile(fname, f, dirstate, inspector);
+//                            }
+//                        } else {
+//                            inspector.unknown(fname);
+//                        }
+//                    }
+//                }
+//            }
+//        } while (!folders.isEmpty());
+//        if (collect != null) {
+//            for (String r : collect.idsMap.keySet()) {
+//                inspector.removed(r);
+//            }
+//        }
+//        for (String m : knownEntries) {
+//            // removed from the repository and missing from working dir shall not be reported as 'deleted'
+//            if (dirstate.checkRemoved(m) == null) {
+//                inspector.missing(m);
+//            }
+//        }
+//    }
+//
+//    private static void checkLocalStatusAgainstFile(String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
+//        HgDirstate.Record r;
+//        if ((r = dirstate.checkNormal(fname)) != null) {
+//            // either clean or modified
+//            if (f.lastModified() / 1000 == r.time && r.size == f.length()) {
+//                inspector.clean(fname);
+//            } else {
+//                // FIXME check actual content to avoid false modified files
+//                inspector.modified(fname);
+//            }
+//        } else if ((r = dirstate.checkAdded(fname)) != null) {
+//            if (r.name2 == null) {
+//                inspector.added(fname);
+//            } else {
+//                inspector.copied(fname, r.name2);
+//            }
+//        } else if ((r = dirstate.checkRemoved(fname)) != null) {
+//            inspector.removed(fname);
+//        } else if ((r = dirstate.checkMerged(fname)) != null) {
+//            inspector.modified(fname);
+//        }
+//    }
+//
+//    // XXX refactor checkLocalStatus methods in more OO way
+//    private void checkLocalStatusAgainstBaseRevision(StatusCollector.ManifestRevisionInspector collect, String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
+//        // fname is in the dirstate, either Normal, Added, Removed or Merged
+//        Nodeid nid1 = collect.idsMap.remove(fname);
+//        String flags = collect.flagsMap.remove(fname);
+//        HgDirstate.Record r;
+//        if (nid1 == null) {
+//            // normal: added?
+//            // added: not known at the time of baseRevision, shall report
+//            // merged: was not known, report as added?
+//            if ((r = dirstate.checkAdded(fname)) != null) {
+//                if (r.name2 != null && collect.idsMap.containsKey(r.name2)) {
+//                    collect.idsMap.remove(r.name2);
+//                    collect.idsMap.remove(r.name2);
+//                    inspector.copied(r.name2, fname);
+//                    return;
+//                }
+//                // fall-through, report as added
+//            } else if (dirstate.checkRemoved(fname) != null) {
+//                // removed: removed file was not known at the time of baseRevision, and we should not report it as removed
+//                return;
+//            }
+//            inspector.added(fname);
+//        } else {
+//            // was known; check whether clean or modified
+//            // when added - seems to be the case of a file added once again, hence need to check if content is different
+//            if ((r = dirstate.checkNormal(fname)) != null || (r = dirstate.checkMerged(fname)) != null || (r = dirstate.checkAdded(fname)) != null) {
+//                // either clean or modified
+//                HgDataFile fileNode = getFileNode(fname);
+//                final int lengthAtRevision = fileNode.length(nid1);
+//                if (r.size /* XXX File.length() ?! */ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) {
+//                    inspector.modified(fname);
+//                } else {
+//                    // check actual content to see actual changes
+//                    // XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison
+//                    if (areTheSame(f, fileNode.content(nid1))) {
+//                        inspector.clean(fname);
+//                    } else {
+//                        inspector.modified(fname);
+//                    }
+//                }
+//            }
+//            // only those left in idsMap after processing are reported as removed
+//        }
+//
+//        // TODO think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest
+//        // we don't need to tell exact difference, hash should be enough to detect difference, and it doesn't involve reading historical file content, and it's relatively
+//        // cheap to calc hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the next approach is used for nodeids:
+//        // changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest
+//        // then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids).
+//        // The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean'
+//    }
 
     private static String todoGenerateFlags(String fname) {
         // FIXME implement
         return null;
     }
@@ -414,26 +375,6 @@
         if (path.startsWith("/")) {
             path = path.substring(1);
         }
         return path;
     }
-
-    // XXX idsMap is being modified from outside. It's better to let outer (modifying) code to create these maps instead
-    private static final class ManifestRevisionCollector implements HgManifest.Inspector {
-        final HashMap<String, Nodeid> idsMap = new HashMap<String, Nodeid>();
-        final HashMap<String, String> flagsMap = new HashMap<String, String>();
-
-        public boolean next(Nodeid nid, String fname, String flags) {
-            idsMap.put(fname, nid);
-            flagsMap.put(fname, flags);
-            return true;
-        }
-
-        public boolean end(int revision) {
-            return false;
-        }
-
-        public boolean begin(int revision, Nodeid nid) {
-            return true;
-        }
-    }
 }
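
For orientation: the commented-out code in 55:05829a70b30b refers to StatusCollector.Inspector and StatusCollector.ManifestRevisionInspector, the types the status operation has been extracted into. A minimal sketch of that callback surface, assuming only what this diff shows: the Inspector method names are taken from the calls above (modified, added, copied, removed, clean, missing, unknown, ignored), and ManifestRevisionInspector mirrors the removed LocalHgRepo.ManifestRevisionCollector. The actual StatusCollector in this changeset may declare these members differently.

package com.tmate.hgkit.ll;

import java.util.HashMap;

// Sketch only: approximates the extracted class from the references visible in the diff.
public class StatusCollector {

    public interface Inspector {
        void modified(String fname);
        void added(String fname);
        // the commented-out code calls copied() with both argument orders; (origin, copy) is assumed here
        void copied(String fnameOrigin, String fnameCopy);
        void removed(String fname);
        void clean(String fname);
        void missing(String fname);
        void unknown(String fname);
        void ignored(String fname);
    }

    // Counterpart of the removed LocalHgRepo.ManifestRevisionCollector:
    // records file -> nodeid and file -> flags while walking a single manifest revision.
    public static final class ManifestRevisionInspector implements HgManifest.Inspector {
        final HashMap<String, Nodeid> idsMap = new HashMap<String, Nodeid>();
        final HashMap<String, String> flagsMap = new HashMap<String, String>();

        public boolean begin(int revision, Nodeid nid) {
            return true;
        }

        public boolean next(Nodeid nid, String fname, String flags) {
            idsMap.put(fname, nid);
            flagsMap.put(fname, flags);
            return true;
        }

        public boolean end(int revision) {
            return false; // only one revision is walked at a time
        }
    }
}

Presumably the "cache-friendly" wording in the commit message points at exactly this: a per-revision manifest snapshot kept in its own object can be reused across several status queries against the same base revision instead of being rebuilt inside LocalHgRepo each time.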
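On the TODO at the end of checkLocalStatusAgainstBaseRevision: to the best of my knowledge, Mercurial hashes a file revision as SHA-1 over the two parent nodeids (in sorted order) followed by the stored text (which may carry copy metadata), not over the changeset nodeid, so a candidate nodeid for a working-directory file can indeed be computed in a streaming fashion without materializing the historical content. A hypothetical helper illustrating the idea (not part of this changeset; the class and method names are made up for the sketch):

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

// Hypothetical: sha1(min(p1,p2) + max(p1,p2) + content), the revlog-style hash of a file revision.
// Copy/rename metadata, when present, is part of 'content' and would have to be prepended as well.
public final class NodeidCalc {

    public static byte[] fileRevisionHash(byte[] parent1, byte[] parent2, java.io.File content)
            throws IOException, NoSuchAlgorithmException {
        byte[] min = compare(parent1, parent2) <= 0 ? parent1 : parent2;
        byte[] max = (min == parent1) ? parent2 : parent1;
        MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
        sha1.update(min);
        sha1.update(max);
        InputStream is = new FileInputStream(content);
        try {
            byte[] buf = new byte[8192];
            int read;
            while ((read = is.read(buf)) != -1) {
                sha1.update(buf, 0, read); // streaming: the file never needs to be fully in memory
            }
        } finally {
            is.close();
        }
        return sha1.digest();
    }

    // unsigned lexicographic comparison of two nodeids
    private static int compare(byte[] a, byte[] b) {
        for (int i = 0; i < a.length && i < b.length; i++) {
            int x = a[i] & 0xff, y = b[i] & 0xff;
            if (x != y) {
                return x < y ? -1 : 1;
            }
        }
        return a.length - b.length;
    }
}

The caveat raised in the TODO still stands: identical content with different parents yields a different nodeid, so a hash mismatch alone does not distinguish "modified" from "clean but with changed parents".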