comparison src/com/tmate/hgkit/ll/LocalHgRepo.java @ 57:8b0d6f1bd6b4

Local status is back
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Mon, 17 Jan 2011 05:54:25 +0100
parents 05829a70b30b
children 4cfc47bc14cc
comparing 56:576d6e8a09f6 with 57:8b0d6f1bd6b4

--- a/src/com/tmate/hgkit/ll/LocalHgRepo.java
+++ b/src/com/tmate/hgkit/ll/LocalHgRepo.java
@@ -9,12 +9,14 @@
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.lang.ref.SoftReference;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.LinkedList;
+import java.util.Set;
 import java.util.TreeSet;
 
 import com.tmate.hgkit.fs.DataAccessProvider;
 
 /**
@@ -44,137 +46,140 @@
     @Override
     public String getLocation() {
         return repoLocation;
     }
 
-//    public void statusLocal(int baseRevision, StatusCollector.Inspector inspector) {
-//        LinkedList<File> folders = new LinkedList<File>();
-//        final File rootDir = repoDir.getParentFile();
-//        folders.add(rootDir);
-//        final HgDirstate dirstate = loadDirstate();
-//        final HgIgnore hgignore = loadIgnore();
-//        TreeSet<String> knownEntries = dirstate.all();
-//        final boolean isTipBase = baseRevision == TIP || baseRevision == getManifest().getRevisionCount();
-//        final StatusCollector.ManifestRevisionInspector collect = isTipBase ? null : new StatusCollector.ManifestRevisionInspector();
-//        if (!isTipBase) {
-//            getManifest().walk(baseRevision, baseRevision, collect);
-//        }
-//        do {
-//            File d = folders.removeFirst();
-//            for (File f : d.listFiles()) {
-//                if (f.isDirectory()) {
-//                    if (!".hg".equals(f.getName())) {
-//                        folders.addLast(f);
-//                    }
-//                } else {
-//                    // FIXME path relative to rootDir - need more robust approach
-//                    String fname = normalize(f.getPath().substring(rootDir.getPath().length() + 1));
-//                    if (hgignore.isIgnored(fname)) {
-//                        inspector.ignored(fname);
-//                    } else {
-//                        if (knownEntries.remove(fname)) {
-//                            // modified, added, removed, clean
-//                            if (collect != null) { // need to check against base revision, not FS file
-//                                checkLocalStatusAgainstBaseRevision(collect, fname, f, dirstate, inspector);
-//                            } else {
-//                                checkLocalStatusAgainstFile(fname, f, dirstate, inspector);
-//                            }
-//                        } else {
-//                            inspector.unknown(fname);
-//                        }
-//                    }
-//                }
-//            }
-//        } while (!folders.isEmpty());
-//        if (collect != null) {
-//            for (String r : collect.idsMap.keySet()) {
-//                inspector.removed(r);
-//            }
-//        }
-//        for (String m : knownEntries) {
-//            // removed from the repository and missing from working dir shall not be reported as 'deleted'
-//            if (dirstate.checkRemoved(m) == null) {
-//                inspector.missing(m);
-//            }
-//        }
-//    }
-//
-//    private static void checkLocalStatusAgainstFile(String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
-//        HgDirstate.Record r;
-//        if ((r = dirstate.checkNormal(fname)) != null) {
-//            // either clean or modified
-//            if (f.lastModified() / 1000 == r.time && r.size == f.length()) {
-//                inspector.clean(fname);
-//            } else {
-//                // FIXME check actual content to avoid false modified files
-//                inspector.modified(fname);
-//            }
-//        } else if ((r = dirstate.checkAdded(fname)) != null) {
-//            if (r.name2 == null) {
-//                inspector.added(fname);
-//            } else {
-//                inspector.copied(fname, r.name2);
-//            }
-//        } else if ((r = dirstate.checkRemoved(fname)) != null) {
-//            inspector.removed(fname);
-//        } else if ((r = dirstate.checkMerged(fname)) != null) {
-//            inspector.modified(fname);
-//        }
-//    }
-//
-//    // XXX refactor checkLocalStatus methods in more OO way
-//    private void checkLocalStatusAgainstBaseRevision(StatusCollector.ManifestRevisionInspector collect, String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
-//        // fname is in the dirstate, either Normal, Added, Removed or Merged
-//        Nodeid nid1 = collect.idsMap.remove(fname);
-//        String flags = collect.flagsMap.remove(fname);
-//        HgDirstate.Record r;
-//        if (nid1 == null) {
-//            // normal: added?
-//            // added: not known at the time of baseRevision, shall report
-//            // merged: was not known, report as added?
-//            if ((r = dirstate.checkAdded(fname)) != null) {
-//                if (r.name2 != null && collect.idsMap.containsKey(r.name2)) {
-//                    collect.idsMap.remove(r.name2);
-//                    collect.idsMap.remove(r.name2);
-//                    inspector.copied(r.name2, fname);
-//                    return;
-//                }
-//                // fall-through, report as added
-//            } else if (dirstate.checkRemoved(fname) != null) {
-//                // removed: removed file was not known at the time of baseRevision, and we should not report it as removed
-//                return;
-//            }
-//            inspector.added(fname);
-//        } else {
-//            // was known; check whether clean or modified
-//            // when added - seems to be the case of a file added once again, hence need to check if content is different
-//            if ((r = dirstate.checkNormal(fname)) != null || (r = dirstate.checkMerged(fname)) != null || (r = dirstate.checkAdded(fname)) != null) {
-//                // either clean or modified
-//                HgDataFile fileNode = getFileNode(fname);
-//                final int lengthAtRevision = fileNode.length(nid1);
-//                if (r.size /* XXX File.length() ?! */ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) {
-//                    inspector.modified(fname);
-//                } else {
-//                    // check actual content to see actual changes
-//                    // XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison
-//                    if (areTheSame(f, fileNode.content(nid1))) {
-//                        inspector.clean(fname);
-//                    } else {
-//                        inspector.modified(fname);
-//                    }
-//                }
-//            }
-//            // only those left in idsMap after processing are reported as removed
-//        }
-//
-//        // TODO think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest
-//        // we don't need to tell exact difference, hash should be enough to detect difference, and it doesn't involve reading historical file content, and it's relatively
-//        // cheap to calc hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the next approach is used for nodeids:
-//        // changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest
-//        // then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids).
-//        // The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean'
-//    }
+    public void statusLocal(int baseRevision, StatusCollector.Inspector inspector) {
+        LinkedList<File> folders = new LinkedList<File>();
+        final File rootDir = repoDir.getParentFile();
+        folders.add(rootDir);
+        final HgDirstate dirstate = loadDirstate();
+        final HgIgnore hgignore = loadIgnore();
+        TreeSet<String> knownEntries = dirstate.all();
+        final boolean isTipBase = baseRevision == TIP || baseRevision == getManifest().getRevisionCount();
+        StatusCollector.ManifestRevisionInspector collect = null;
+        Set<String> baseRevFiles = Collections.emptySet();
+        if (!isTipBase) {
+            collect = new StatusCollector.ManifestRevisionInspector(baseRevision, baseRevision);
+            getManifest().walk(baseRevision, baseRevision, collect);
+            baseRevFiles = new TreeSet<String>(collect.files(baseRevision));
+        }
+        do {
+            File d = folders.removeFirst();
+            for (File f : d.listFiles()) {
+                if (f.isDirectory()) {
+                    if (!".hg".equals(f.getName())) {
+                        folders.addLast(f);
+                    }
+                } else {
+                    // FIXME path relative to rootDir - need more robust approach
+                    String fname = normalize(f.getPath().substring(rootDir.getPath().length() + 1));
+                    if (hgignore.isIgnored(fname)) {
+                        inspector.ignored(fname);
+                    } else {
+                        if (knownEntries.remove(fname)) {
+                            // modified, added, removed, clean
+                            if (collect != null) { // need to check against base revision, not FS file
+                                Nodeid nid1 = collect.nodeid(baseRevision, fname);
+                                String flags = collect.flags(baseRevision, fname);
+                                checkLocalStatusAgainstBaseRevision(baseRevFiles, nid1, flags, fname, f, dirstate, inspector);
+                                baseRevFiles.remove(fname);
+                            } else {
+                                checkLocalStatusAgainstFile(fname, f, dirstate, inspector);
+                            }
+                        } else {
+                            inspector.unknown(fname);
+                        }
+                    }
+                }
+            }
+        } while (!folders.isEmpty());
+        if (collect != null) {
+            for (String r : baseRevFiles) {
+                inspector.removed(r);
+            }
+        }
+        for (String m : knownEntries) {
+            // removed from the repository and missing from working dir shall not be reported as 'deleted'
+            if (dirstate.checkRemoved(m) == null) {
+                inspector.missing(m);
+            }
+        }
+    }
+
+    private static void checkLocalStatusAgainstFile(String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
+        HgDirstate.Record r;
+        if ((r = dirstate.checkNormal(fname)) != null) {
+            // either clean or modified
+            if (f.lastModified() / 1000 == r.time && r.size == f.length()) {
+                inspector.clean(fname);
+            } else {
+                // FIXME check actual content to avoid false modified files
+                inspector.modified(fname);
+            }
+        } else if ((r = dirstate.checkAdded(fname)) != null) {
+            if (r.name2 == null) {
+                inspector.added(fname);
+            } else {
+                inspector.copied(fname, r.name2);
+            }
+        } else if ((r = dirstate.checkRemoved(fname)) != null) {
+            inspector.removed(fname);
+        } else if ((r = dirstate.checkMerged(fname)) != null) {
+            inspector.modified(fname);
+        }
+    }
+
+    // XXX refactor checkLocalStatus methods in more OO way
+    private void checkLocalStatusAgainstBaseRevision(Set<String> baseRevNames, Nodeid nid1, String flags, String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
+        // fname is in the dirstate, either Normal, Added, Removed or Merged
+        HgDirstate.Record r;
+        if (nid1 == null) {
+            // normal: added?
+            // added: not known at the time of baseRevision, shall report
+            // merged: was not known, report as added?
+            if ((r = dirstate.checkAdded(fname)) != null) {
+                if (r.name2 != null && baseRevNames.contains(r.name2)) {
+                    baseRevNames.remove(r.name2);
+                    inspector.copied(r.name2, fname);
+                    return;
+                }
+                // fall-through, report as added
+            } else if (dirstate.checkRemoved(fname) != null) {
+                // removed: removed file was not known at the time of baseRevision, and we should not report it as removed
+                return;
+            }
+            inspector.added(fname);
+        } else {
+            // was known; check whether clean or modified
+            // when added - seems to be the case of a file added once again, hence need to check if content is different
+            if ((r = dirstate.checkNormal(fname)) != null || (r = dirstate.checkMerged(fname)) != null || (r = dirstate.checkAdded(fname)) != null) {
+                // either clean or modified
+                HgDataFile fileNode = getFileNode(fname);
+                final int lengthAtRevision = fileNode.length(nid1);
+                if (r.size /* XXX File.length() ?! */ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) {
+                    inspector.modified(fname);
+                } else {
+                    // check actual content to see actual changes
+                    // XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison
+                    if (areTheSame(f, fileNode.content(nid1))) {
+                        inspector.clean(fname);
+                    } else {
+                        inspector.modified(fname);
+                    }
+                }
+            }
+            // only those left in idsMap after processing are reported as removed
+        }
+
+        // TODO think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest
+        // we don't need to tell exact difference, hash should be enough to detect difference, and it doesn't involve reading historical file content, and it's relatively
+        // cheap to calc hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the next approach is used for nodeids:
+        // changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest
+        // then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids).
+        // The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean'
+    }
 
     private static String todoGenerateFlags(String fname) {
         // FIXME implement
         return null;
     }
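
The statusLocal() method reinstated above reports each working-directory file through a StatusCollector.Inspector callback. A minimal usage sketch follows; it assumes the Inspector interface declares exactly the eight callbacks invoked in the diff (modified, added, copied, removed, clean, missing, unknown, ignored) and that a LocalHgRepo instance is already at hand, so the class name and wiring below are illustrative only, not part of this changeset.

    // Hypothetical Inspector that prints one line per callback, loosely mimicking 'hg status' letters.
    // The interface shape is assumed from the calls made in statusLocal() above.
    public class PrintStatusInspector implements StatusCollector.Inspector {
        public void modified(String fname) { System.out.println("M " + fname); }
        public void added(String fname)    { System.out.println("A " + fname); }
        // argument order differs between the two call sites above, so both names are printed verbatim
        public void copied(String fname1, String fname2) { System.out.println("copy " + fname1 + " " + fname2); }
        public void removed(String fname)  { System.out.println("R " + fname); }
        public void clean(String fname)    { System.out.println("C " + fname); }
        public void missing(String fname)  { System.out.println("! " + fname); }
        public void unknown(String fname)  { System.out.println("? " + fname); }
        public void ignored(String fname)  { System.out.println("I " + fname); }

        // e.g. printStatus(repo, baseRevision) reports the working directory against baseRevision
        public static void printStatus(LocalHgRepo repo, int baseRevision) {
            repo.statusLocal(baseRevision, new PrintStatusInspector());
        }
    }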
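
The trailing TODO in checkLocalStatusAgainstBaseRevision() considers comparing a nodeid computed for the local file against the nodeid recorded in the manifest instead of comparing content byte by byte. Mercurial derives a revision's nodeid as SHA-1 over the two parent nodeids in byte-wise sorted order followed by the revision text (for file revisions that carry copy metadata, that metadata block is part of the hashed text). The sketch below only illustrates that hashing scheme under those assumptions; the class name, the way parent nodeids would be obtained, and any integration with LocalHgRepo are hypothetical and not part of this changeset.

    // Sketch of revlog-style hashing: sha1(min(p1,p2) + max(p1,p2) + content), streamed from a file
    // so the content need not be held in memory, as the TODO above suggests.
    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    public class LocalNodeidSketch {

        public static byte[] hash(byte[] parent1, byte[] parent2, File content) throws IOException, NoSuchAlgorithmException {
            MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
            // parents are fed in byte-wise sorted order, smaller one first
            if (compare(parent1, parent2) <= 0) {
                sha1.update(parent1);
                sha1.update(parent2);
            } else {
                sha1.update(parent2);
                sha1.update(parent1);
            }
            FileInputStream fis = new FileInputStream(content);
            try {
                byte[] buf = new byte[8192];
                int read;
                while ((read = fis.read(buf)) != -1) {
                    sha1.update(buf, 0, read);
                }
            } finally {
                fis.close();
            }
            return sha1.digest();
        }

        private static int compare(byte[] a, byte[] b) {
            for (int i = 0; i < a.length && i < b.length; i++) {
                int d = (a[i] & 0xff) - (b[i] & 0xff);
                if (d != 0) {
                    return d;
                }
            }
            return a.length - b.length;
        }
    }

Comparing the resulting 20-byte digest with the manifest entry would distinguish clean from modified without reading historical file content, which is the optimization the TODO contemplates; whether Mercurial itself reports "same content, different parents" as modified or clean is the open question noted there.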