/*
 * Copyright (c) 2011 TMate Software Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * For information on how to redistribute this software under
 * the terms of a license other than GNU General Public License
 * contact TMate Software at support@hg4j.com
 */
package org.tmatesoft.hg.internal;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Map;
import java.util.TreeMap;

import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.util.Path;

/**
 *
 * @author Artem Tikhomirov
 * @author TMate Software Ltd.
 */
public class KeywordFilter implements Filter {
	// the present implementation is stateless; however, the filter use pattern shall not assume that. In fact, Factory may make use of that.
	private final HgRepository repo;
	private final boolean isExpanding;
	private final TreeMap<String, String> keywords;
	private final int minBufferLen;
	private final Path path;
	private RawChangeset latestFileCset;

	/**
	 * @param hgRepo repository the filtered file comes from
	 * @param p path of the file being filtered
	 * @param expand <code>true</code> to expand keywords, <code>false</code> to shrink them
	 */
	private KeywordFilter(HgRepository hgRepo, Path p, boolean expand) {
		repo = hgRepo;
		path = p;
		isExpanding = expand;
		keywords = new TreeMap<String, String>();
		keywords.put("Id", "Id");
		keywords.put("Revision", "Revision");
		keywords.put("Author", "Author");
		keywords.put("Date", "Date");
		keywords.put("LastChangedRevision", "LastChangedRevision");
		keywords.put("LastChangedBy", "LastChangedBy");
		keywords.put("LastChangedDate", "LastChangedDate");
		keywords.put("Source", "Source");
		keywords.put("Header", "Header");

		int l = 0;
		for (String s : keywords.keySet()) {
			if (s.length() > l) {
				l = s.length();
			}
		}
		// FIXME may later implement #filter() so that it doesn't read the full kw value (just "$kw:"); the limit of maxLen + 2 would remain valid then.
		// For buffers smaller than minBufferLen, there's a chance the #filter() implementation would never end
		// (e.g. for input that opens the longest keyword with '$' but fits no closing '$' into the buffer).
		minBufferLen = l + 2 + (isExpanding ? 0 : 120 /* any reasonable constant for max possible kw value length */);
	}

	/**
	 * @param src buffer ready to be read
	 * @return buffer ready to be read; the original buffer's position is modified to reflect the consumed bytes.
	 *         In other words, if the source buffer has remaining bytes on return, they are assumed not yet read
	 *         (not processed), and the next chunk passed to the filter is supposed to start with them.
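	 * <p>
	 * A typical driver loop, sketched after the scratch code at the bottom of this file ({@code channel} and
	 * {@code consume} stand for caller-provided I/O and are not part of this class):
	 * <pre>
	 * ByteBuffer b = ByteBuffer.allocate(512); // shall be at least minBufferLen bytes
	 * while (channel.read(b) != -1) {
	 *     b.flip(); // get ready to be read
	 *     ByteBuffer f = filter(b);
	 *     while (f.hasRemaining()) {
	 *         consume(f);
	 *     }
	 *     if (b.hasRemaining()) {
	 *         b.compact(); // keep unconsumed bytes for the next round
	 *     } else {
	 *         b.clear();
	 *     }
	 * }
	 * </pre>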
	 */
	public ByteBuffer filter(ByteBuffer src) {
		if (src.capacity() < minBufferLen) {
			throw new IllegalStateException(String.format("Need buffer of at least %d bytes to ensure filter won't hang", minBufferLen));
		}
		ByteBuffer rv = null;
		int keywordStart = -1;
		int x = src.position();
		int copyFrom = x; // needs to be updated each time we copy a slice, but not each time we modify the source index (x)
		while (x < src.limit()) {
			if (keywordStart == -1) {
				int i = indexOf(src, '$', x, false);
				if (i == -1) {
					if (rv == null) {
						return src;
					} else {
						copySlice(src, copyFrom, src.limit(), rv);
						rv.flip();
						src.position(src.limit());
						return rv;
					}
				}
				keywordStart = i;
				// fall-through
			}
			if (keywordStart >= 0) {
				int i = indexOf(src, '$', keywordStart + 1, true);
				if (i == -1) {
					// end of buffer reached
					if (rv == null) {
						if (keywordStart == x) {
							// FIXME in fact, x might be equal to keywordStart and to src.position() here ('$' is the first character in the buffer,
							// and there are no other '$' nor EOLs till the end of the buffer). This would lead to a deadlock (filter won't consume any
							// bytes). To prevent this, we shall either copy bytes [keywordStart..buffer.limit()) to a local buffer and use it on the
							// next invocation, or look the keywords up right after the first '$' is found (without waiting for the closing '$').
							// For now, a large enough src buffer is sufficient to avoid running into this situation.
							throw new IllegalStateException("Try src buffer of a greater size");
						}
						rv = ByteBuffer.allocate(keywordStart - copyFrom);
					}
					// copy everything from the source up to the latest possible kw start
					copySlice(src, copyFrom, keywordStart, rv);
					rv.flip();
					// and tell the caller we've consumed only up to the potential kw start
					src.position(keywordStart);
					return rv;
				} else if (src.get(i) == '$') {
					// end of keyword, or start of a new one
					String keyword;
					if ((keyword = matchKeyword(src, keywordStart, i)) != null) {
						if (rv == null) {
							// src.remaining(), not .capacity(), because src is not yet read, and remaining represents
							// the actual byte count, while capacity is only the potential one.
							// The factor of 4 is a pure guess and a HACK; needs to be fixed by re-expanding the buffer on demand.
							rv = ByteBuffer.allocate(isExpanding ? src.remaining() * 4 : src.remaining());
						}
						copySlice(src, copyFrom, keywordStart + 1, rv);
						rv.put(keyword.getBytes());
						if (isExpanding) {
							rv.put((byte) ':');
							rv.put((byte) ' ');
							expandKeywordValue(keyword, rv);
							rv.put((byte) ' ');
						}
						rv.put((byte) '$');
						keywordStart = -1;
						x = i + 1;
						copyFrom = x;
						continue;
					} else {
						if (rv != null) {
							// we've already done some substitution, thus need to copy the bytes we've scanned
							copySlice(src, x, i, rv);
							copyFrom = i;
						} // no else, in an attempt to avoid rv creation if no real kw is found
						keywordStart = i;
						x = i; // '$' at i wasn't consumed, hence x points to i, not i+1. This is to avoid problems with the case: "sdfsd $ asdfs $Id$ sdf"
						continue;
					}
				} else {
					assert src.get(i) == '\n' || src.get(i) == '\r';
					// line break
					if (rv != null) {
						copySlice(src, x, i + 1, rv);
						copyFrom = i + 1;
					}
					x = i + 1;
					keywordStart = -1; // wasn't a keyword, really
					continue; // try once again
				}
			}
		}
		if (keywordStart != -1) {
			if (rv == null) {
				// no expansion happened yet, and we have a potential kw start
				rv = ByteBuffer.allocate(keywordStart - src.position());
				copySlice(src, src.position(), keywordStart, rv);
			}
			src.position(keywordStart);
		}
		if (rv != null) {
			rv.flip();
			return rv;
		}
		return src;
	}
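
	// To illustrate both directions (the expanded value below is hypothetical; see #identityString() for the actual format):
	// expanding rewrites  "$Id$"
	// into something like "$Id: src/org/sample/Main.java,v 1e23f5a1 2011/03/15 21:21:06 artem $",
	// while the shrink direction collapses the latter back to plain "$Id$".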
	/**
	 * @param keyword keyword whose value is to be expanded, e.g. "Id"
	 * @param rv buffer the expanded value is appended to
	 */
	private void expandKeywordValue(String keyword, ByteBuffer rv) {
		if ("Id".equals(keyword)) {
			rv.put(identityString().getBytes());
		} else if ("Revision".equals(keyword)) {
			rv.put(revision().getBytes());
		} else if ("Author".equals(keyword)) {
			rv.put(username().getBytes());
		} else if ("Date".equals(keyword)) {
			rv.put(date().getBytes());
		} else {
			throw new IllegalStateException(String.format("Keyword %s is not yet supported", keyword));
		}
	}

	private String matchKeyword(ByteBuffer src, int kwStart, int kwEnd) {
		assert kwEnd - kwStart - 1 > 0;
		assert src.get(kwStart) == src.get(kwEnd) && src.get(kwEnd) == '$';
		char[] chars = new char[kwEnd - kwStart - 1];
		int i;
		for (i = 0; i < chars.length; i++) {
			char c = (char) src.get(kwStart + 1 + i);
			if (c == ':') {
				break;
			}
			chars[i] = c;
		}
		String kw = new String(chars, 0, i);
		// XXX may use subMap to look keywords up from the few characters available so far (without waiting for the closing '$'), e.g.:
		// System.out.println(keywords.subMap("I", "J"));
		// System.out.println(keywords.subMap("A", "B"));
		// System.out.println(keywords.subMap("Au", "B"));
		return keywords.get(kw);
	}
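
	// A possible shape for the early-lookup idea from the XXX note above (an unused sketch, not part of the
	// implementation; the prefix bounds rely on standard TreeMap#subMap semantics):
	//
	//   private boolean mayBeKeyword(String prefix) {
	//       // a non-empty sub-map means at least one known keyword starts with the scanned prefix,
	//       // so it makes sense to keep scanning towards the closing '$'
	//       return !keywords.subMap(prefix, prefix + Character.MAX_VALUE).isEmpty();
	//   }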
	// Copies part of the src buffer, [from..to). Doesn't modify src position.
	static void copySlice(ByteBuffer src, int from, int to, ByteBuffer dst) {
		if (to > src.limit()) {
			throw new IllegalArgumentException("Bad right boundary");
		}
		if (dst.remaining() < to - from) {
			throw new IllegalArgumentException("Not enough room in the destination buffer");
		}
		for (int i = from; i < to; i++) {
			dst.put(src.get(i));
		}
	}

	private static int indexOf(ByteBuffer b, char ch, int from, boolean newlineBreaks) {
		for (int i = from; i < b.limit(); i++) {
			byte c = b.get(i);
			if (ch == c) {
				return i;
			}
			if (newlineBreaks && (c == '\n' || c == '\r')) {
				return i;
			}
		}
		return -1;
	}

	private String identityString() {
		return String.format("%s,v %s %s %s", path, revision(), date(), username());
	}

	private String revision() {
		// FIXME add cset's nodeid into Changeset class
		int csetRev = repo.getFileNode(path).getChangesetLocalRevision(HgRepository.TIP);
		return repo.getChangelog().getRevision(csetRev).shortNotation();
	}

	private String username() {
		return getChangeset().user();
	}

	private String date() {
		return String.format("%tY/%<tm/%<td %<tH:%<tM:%<tS %<tZ", getChangeset().date());
	}

	private RawChangeset getChangeset() {
		if (latestFileCset == null) {
			// lazily find the changeset the file was last modified in
			int csetRev = repo.getFileNode(path).getChangesetLocalRevision(HgRepository.TIP);
			latestFileCset = repo.getChangelog().range(csetRev, csetRev).get(0);
		}
		return latestFileCset;
	}

	public static class Factory implements Filter.Factory {

		private HgRepository repo;
		private Path.Matcher matcher;

		public void initialize(HgRepository hgRepo, ConfigFile cfg) {
			repo = hgRepo;
			ArrayList<String> patterns = new ArrayList<String>();
			for (Map.Entry<String, String> e : cfg.getSection("keyword").entrySet()) {
				if (!"ignore".equalsIgnoreCase(e.getValue())) {
					patterns.add(e.getKey());
				}
			}
			matcher = new PathGlobMatcher(patterns.toArray(new String[patterns.size()]));
			// TODO read and respect keyword patterns from [keywordmaps]
		}

		public Filter create(Path path, Options opts) {
			if (matcher.accept(path)) {
				return new KeywordFilter(repo, path, opts.getDirection() == Filter.Direction.FromRepo);
			}
			return null;
		}
	}

//	public static void main(String[] args) throws Exception {
//		FileInputStream fis = new FileInputStream(new File("/temp/kwoutput.txt"));
//		FileOutputStream fos = new FileOutputStream(new File("/temp/kwoutput2.txt"));
//		ByteBuffer b = ByteBuffer.allocate(256);
//		KeywordFilter kwFilter = new KeywordFilter(null, null, false); // scratch: the shrink direction doesn't touch repo/path
//		while (fis.getChannel().read(b) != -1) {
//			b.flip(); // get ready to be read
//			ByteBuffer f = kwFilter.filter(b);
//			while (f.hasRemaining()) {
//				fos.getChannel().write(f); // drain completely: a single write may not consume f in full
//			}
//			if (b.hasRemaining()) {
//				b.compact();
//			} else {
//				b.clear();
//			}
//		}
//		fis.close();
//		fos.flush();
//		fos.close();
//	}
}