Mercurial > hg4j
comparison hg4j/src/main/java/org/tmatesoft/hg/internal/KeywordFilter.java @ 213:6ec4af642ba8 gradle
Project uses Gradle for build - actual changes
author | Alexander Kitaev <kitaev@gmail.com> |
---|---|
date | Tue, 10 May 2011 10:52:53 +0200 |
parents | |
children |
comparison
equal
deleted
inserted
replaced
212:edb2e2829352 | 213:6ec4af642ba8 |
---|---|
1 /* | |
2 * Copyright (c) 2011 TMate Software Ltd | |
3 * | |
4 * This program is free software; you can redistribute it and/or modify | |
5 * it under the terms of the GNU General Public License as published by | |
6 * the Free Software Foundation; version 2 of the License. | |
7 * | |
8 * This program is distributed in the hope that it will be useful, | |
9 * but WITHOUT ANY WARRANTY; without even the implied warranty of | |
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
11 * GNU General Public License for more details. | |
12 * | |
13 * For information on how to redistribute this software under | |
14 * the terms of a license other than GNU General Public License | |
15 * contact TMate Software at support@hg4j.com | |
16 */ | |
17 package org.tmatesoft.hg.internal; | |
18 | |
19 import java.nio.ByteBuffer; | |
20 import java.util.ArrayList; | |
21 import java.util.Map; | |
22 import java.util.TreeMap; | |
23 | |
24 import org.tmatesoft.hg.repo.HgChangelog.RawChangeset; | |
25 import org.tmatesoft.hg.repo.HgRepository; | |
26 import org.tmatesoft.hg.util.Path; | |
27 | |
28 /** | |
29 * | |
30 * @author Artem Tikhomirov | |
31 * @author TMate Software Ltd. | |
32 */ | |
public class KeywordFilter implements Filter {
	// present implementation is stateless, however, filter use pattern shall not assume that. In fact, Factory may reuse instances.
	private final HgRepository repo;
	// true = expand keywords ($Id$ -> $Id: ... $), typically on checkout; false = shrink them back, typically on commit
	private final boolean isExpanding;
	// maps a recognized keyword name to the text written back in its place; currently an identity mapping,
	// i.e. only the canonical spelling of each keyword is kept
	private final TreeMap<String,String> keywords;
	// smallest source-buffer capacity #filter() accepts; below this the filter might fail to make progress (see ctor comment)
	private final int minBufferLen;
	// repository-relative path of the file being filtered; used to resolve its latest changeset for expansion values
	private final Path path;
	// lazily resolved changeset of this file's latest (TIP) revision; populated on first use by #getChangeset()
	private RawChangeset latestFileCset;

	/**
	 * Creates a filter bound to a single file of the given repository.
	 *
	 * @param hgRepo repository the file belongs to; used to look up revision/author/date for expansion
	 * @param p repository-relative path of the file this filter processes
	 * @param expand <code>true</code> to expand keywords, <code>false</code> to shrink
	 */
	private KeywordFilter(HgRepository hgRepo, Path p, boolean expand) {
		repo = hgRepo;
		path = p;
		isExpanding = expand;
		keywords = new TreeMap<String,String>();
		keywords.put("Id", "Id");
		keywords.put("Revision", "Revision");
		keywords.put("Author", "Author");
		keywords.put("Date", "Date");
		keywords.put("LastChangedRevision", "LastChangedRevision");
		keywords.put("LastChangedBy", "LastChangedBy");
		keywords.put("LastChangedDate", "LastChangedDate");
		keywords.put("Source", "Source");
		keywords.put("Header", "Header");

		// find the longest keyword name; it bounds how much look-ahead #filter() may need
		int l = 0;
		for (String s : keywords.keySet()) {
			if (s.length() > l) {
				l = s.length();
			}
		}
		// FIXME later may implement #filter() not to read full kw value (just "$kw:"). However, limit of maxLen + 2 would keep valid.
		// for buffers less then minBufferLen, there are chances #filter() implementation would never end
		// (i.e. for input "$LongestKey"$
		minBufferLen = l + 2 + (isExpanding ? 0 : 120 /*any reasonable constant for max possible kw value length*/);
	}

	/**
	 * Scans the buffer for <code>$Keyword$</code> (or <code>$Keyword: value $</code>) tokens and rewrites them
	 * according to the filter direction ({@link #isExpanding}).
	 *
	 * @param src buffer ready to be read
	 * @return buffer ready to be read and original buffer's position modified to reflect consumed bytes. IOW, if source buffer
	 * on return has remaining bytes, they are assumed not-read (not processed) and next chunk passed to filter is supposed to
	 * start with them
	 * @throws IllegalStateException if the buffer is too small to guarantee progress (see {@link #minBufferLen})
	 */
	public ByteBuffer filter(ByteBuffer src) {
		if (src.capacity() < minBufferLen) {
			throw new IllegalStateException(String.format("Need buffer of at least %d bytes to ensure filter won't hang", minBufferLen));
		}
		ByteBuffer rv = null; // allocated lazily, only once a keyword is actually found/suspected
		int keywordStart = -1; // index of a candidate opening '$', or -1 while scanning for one
		int x = src.position(); // current scan index into src (src.position() itself is only moved on return)
		int copyFrom = x; // needs to be updated each time we copy a slice, but not each time we modify source index (x)
		while (x < src.limit()) {
			if (keywordStart == -1) {
				// look for a potential keyword start; newlines do NOT terminate this scan
				int i = indexOf(src, '$', x, false);
				if (i == -1) {
					// no '$' till the end of the buffer
					if (rv == null) {
						// nothing was rewritten — hand the source back untouched
						return src;
					} else {
						copySlice(src, copyFrom, src.limit(), rv);
						rv.flip();
						src.position(src.limit());
						return rv;
					}
				}
				keywordStart = i;
				// fall-through
			}
			if (keywordStart >= 0) {
				// look for the closing '$'; a newline before it means this was no keyword at all
				int i = indexOf(src, '$', keywordStart+1, true);
				if (i == -1) {
					// end of buffer reached
					if (rv == null) {
						if (keywordStart == x) {
							// FIXME in fact, x might be equal to keywordStart and to src.position() here ('$' is first character in the buffer,
							// and there are no other '$' not eols till the end of the buffer). This would lead to deadlock (filter won't consume any
							// bytes). To prevent this, either shall copy bytes [keywordStart..buffer.limit()) to local buffer and use it on the next invocation,
							// or add lookup of the keywords right after first '$' is found (do not wait for closing '$'). For now, large enough src buffer would be sufficient
							// not to run into such situation
							throw new IllegalStateException("Try src buffer of a greater size");
						}
						rv = ByteBuffer.allocate(keywordStart - copyFrom);
					}
					// copy all from source till latest possible kw start
					copySlice(src, copyFrom, keywordStart, rv);
					rv.flip();
					// and tell caller we've consumed only to the potential kw start
					src.position(keywordStart);
					return rv;
				} else if (src.get(i) == '$') {
					// end of keyword, or start of a new one.
					String keyword;
					if ((keyword = matchKeyword(src, keywordStart, i)) != null) {
						if (rv == null) {
							// src.remaining(), not .capacity because src is not read, and remaining represents
							// actual bytes count, while capacity - potential.
							// Factor of 4 is pure guess and a HACK, need to be fixed with re-expanding buffer on demand
							rv = ByteBuffer.allocate(isExpanding ? src.remaining() * 4 : src.remaining());
						}
						// copy everything up to and including the opening '$'
						copySlice(src, copyFrom, keywordStart+1, rv);
						// NOTE(review): getBytes() uses the platform default charset — presumably safe since keyword
						// names are ASCII-only, but confirm; an explicit charset would be more robust
						rv.put(keyword.getBytes());
						if (isExpanding) {
							rv.put((byte) ':');
							rv.put((byte) ' ');
							expandKeywordValue(keyword, rv);
							rv.put((byte) ' ');
						}
						rv.put((byte) '$');
						keywordStart = -1;
						x = i+1;
						copyFrom = x;
						continue;
					} else {
						if (rv != null) {
							// we've already did some substitution, thus need to copy bytes we've scanned.
							copySlice(src, x, i, rv);
							copyFrom = i;
						} // no else in attempt to avoid rv creation if no real kw would be found
						keywordStart = i;
						x = i; // '$' at i wasn't consumed, hence x points to i, not i+1. This is to avoid problems with case: "sdfsd $ asdfs $Id$ sdf"
						continue;
					}
				} else {
					assert src.get(i) == '\n' || src.get(i) == '\r';
					// line break
					if (rv != null) {
						copySlice(src, x, i+1, rv);
						copyFrom = i+1;
					}
					x = i+1;
					keywordStart = -1; // Wasn't keyword, really
					continue; // try once again
				}
			}
		}
		if (keywordStart != -1) {
			if (rv == null) {
				// no expansion happened yet, and we have potential kw start
				rv = ByteBuffer.allocate(keywordStart - src.position());
				copySlice(src, src.position(), keywordStart, rv);
			}
			// leave the dangling '$...' for the next chunk
			src.position(keywordStart);
		}
		if (rv != null) {
			rv.flip();
			return rv;
		}
		return src;
	}

	/**
	 * Appends the expanded value for the given keyword (revision, author, date, ...) to the destination buffer.
	 *
	 * @param keyword canonical keyword name as returned by {@link #matchKeyword(ByteBuffer, int, int)}
	 * @param rv destination buffer, positioned right after "Keyword: " has been written
	 * @throws IllegalStateException for recognized keywords whose expansion is not implemented yet
	 */
	private void expandKeywordValue(String keyword, ByteBuffer rv) {
		// NOTE(review): getBytes() below relies on the platform default charset; values may include
		// author names that are not ASCII — worth confirming an explicit charset is not needed
		if ("Id".equals(keyword)) {
			rv.put(identityString().getBytes());
		} else if ("Revision".equals(keyword)) {
			rv.put(revision().getBytes());
		} else if ("Author".equals(keyword)) {
			rv.put(username().getBytes());
		} else if ("Date".equals(keyword)) {
			rv.put(date().getBytes());
		} else {
			throw new IllegalStateException(String.format("Keyword %s is not yet supported", keyword));
		}
	}

	/**
	 * Checks whether the text between two '$' delimiters is a recognized keyword.
	 * Text after an optional ':' (i.e. an already-expanded value) is ignored for matching.
	 *
	 * @param src buffer to read keyword characters from (position is not modified)
	 * @param kwStart index of the opening '$'
	 * @param kwEnd index of the closing '$'
	 * @return canonical keyword name, or <code>null</code> if the text is not a known keyword
	 */
	private String matchKeyword(ByteBuffer src, int kwStart, int kwEnd) {
		assert kwEnd - kwStart - 1 > 0;
		assert src.get(kwStart) == src.get(kwEnd) && src.get(kwEnd) == '$';
		char[] chars = new char[kwEnd - kwStart - 1];
		int i;
		for (i = 0; i < chars.length; i++) {
			char c = (char) src.get(kwStart + 1 + i);
			if (c == ':') {
				// "$Keyword: value $" form — only the name part participates in the lookup
				break;
			}
			chars[i] = c;
		}
		String kw = new String(chars, 0, i);
		// XXX may use subMap to look up keywords based on few available characters (not waiting till closing $)
		// System.out.println(keywords.subMap("I", "J"));
		// System.out.println(keywords.subMap("A", "B"));
		// System.out.println(keywords.subMap("Au", "B"));
		return keywords.get(kw);
	}

	// copies part of the src buffer, [from..to). doesn't modify src position
	static void copySlice(ByteBuffer src, int from, int to, ByteBuffer dst) {
		if (to > src.limit()) {
			throw new IllegalArgumentException("Bad right boundary");
		}
		if (dst.remaining() < to - from) {
			throw new IllegalArgumentException("Not enough room in the destination buffer");
		}
		for (int i = from; i < to; i++) {
			dst.put(src.get(i));
		}
	}

	// finds the first occurrence of ch in [from..limit); when newlineBreaks is true, also stops at (and returns
	// the index of) the first '\n' or '\r' — the caller distinguishes the two cases by inspecting the byte.
	// Returns -1 if neither is found. Buffer position is not modified.
	private static int indexOf(ByteBuffer b, char ch, int from, boolean newlineBreaks) {
		for (int i = from; i < b.limit(); i++) {
			byte c = b.get(i);
			if (ch == c) {
				return i;
			}
			if (newlineBreaks && (c == '\n' || c == '\r')) {
				return i;
			}
		}
		return -1;
	}

	// value for the $Id$ keyword: "<path>,v <short revision> <date> <user>"
	private String identityString() {
		return String.format("%s,v %s %s %s", path, revision(), date(), username());
	}

	// short form of the changeset revision the file was last modified in
	private String revision() {
		// FIXME add cset's nodeid into Changeset class
		int csetRev = repo.getFileNode(path).getChangesetLocalRevision(HgRepository.TIP);
		return repo.getChangelog().getRevision(csetRev).shortNotation();
	}

	// user recorded in the file's latest changeset
	private String username() {
		return getChangeset().user();
	}

	// date of the file's latest changeset, formatted as "yyyy/MM/dd HH:mm:ss"
	private String date() {
		return String.format("%tY/%<tm/%<td %<tH:%<tM:%<tS", getChangeset().date());
	}

	// resolves (once) and caches the changeset of this file's latest revision
	private RawChangeset getChangeset() {
		if (latestFileCset == null) {
			// XXX consider use of ChangelogHelper
			int csetRev = repo.getFileNode(path).getChangesetLocalRevision(HgRepository.TIP);
			latestFileCset = repo.getChangelog().range(csetRev, csetRev).get(0);
		}
		return latestFileCset;
	}

	/**
	 * Creates {@link KeywordFilter} instances for paths that match the patterns of the
	 * <code>[keyword]</code> section of the repository configuration.
	 */
	public static class Factory implements Filter.Factory {

		private HgRepository repo;
		private Path.Matcher matcher;

		public void initialize(HgRepository hgRepo, ConfigFile cfg) {
			repo = hgRepo;
			ArrayList<String> patterns = new ArrayList<String>();
			for (Map.Entry<String,String> e : cfg.getSection("keyword").entrySet()) {
				// entries with value "ignore" explicitly exclude a pattern from keyword processing
				if (!"ignore".equalsIgnoreCase(e.getValue())) {
					patterns.add(e.getKey());
				}
			}
			matcher = new PathGlobMatcher(patterns.toArray(new String[patterns.size()]));
			// TODO read and respect keyword patterns from [keywordmaps]
		}

		public Filter create(Path path, Options opts) {
			if (matcher.accept(path)) {
				// reading FROM the repository (checkout) expands keywords; writing TO it shrinks them
				return new KeywordFilter(repo, path, opts.getDirection() == Filter.Direction.FromRepo);
			}
			return null;
		}
	}

	//
	// public static void main(String[] args) throws Exception {
	// FileInputStream fis = new FileInputStream(new File("/temp/kwoutput.txt"));
	// FileOutputStream fos = new FileOutputStream(new File("/temp/kwoutput2.txt"));
	// ByteBuffer b = ByteBuffer.allocate(256);
	// KeywordFilter kwFilter = new KeywordFilter(false);
	// while (fis.getChannel().read(b) != -1) {
	// b.flip(); // get ready to be read
	// ByteBuffer f = kwFilter.filter(b);
	// fos.getChannel().write(f); // XXX in fact, f may not be fully consumed
	// if (b.hasRemaining()) {
	// b.compact();
	// } else {
	// b.clear();
	// }
	// }
	// fis.close();
	// fos.flush();
	// fos.close();
	// }
}