changeset 49:26e3eeaa3962

branch and user filtering for log operation
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Sat, 15 Jan 2011 01:15:38 +0100
parents e34f90b9ded1
children f1db8610da62
files design.txt src/com/tmate/hgkit/console/Log.java src/com/tmate/hgkit/fs/RepositoryLookup.java src/com/tmate/hgkit/ll/Changeset.java src/com/tmate/hgkit/ll/HgDataFile.java src/com/tmate/hgkit/ll/Revlog.java src/com/tmate/hgkit/ll/RevlogStream.java
diffstat 7 files changed, 144 insertions(+), 27 deletions(-) [+]
line wrap: on
line diff
--- a/design.txt	Fri Jan 14 23:22:20 2011 +0100
+++ b/design.txt	Sat Jan 15 01:15:38 2011 +0100
@@ -31,10 +31,10 @@
 *.hgignored processing
 +Nodeid to keep 20 bytes always, Revlog.Inspector to get nodeid array of meaningful data exact size (neither leading 00 bytes nor 12 extra bytes from the spec)
 +DataAccess - implement memory mapped files, 
++Changeset to get index (local revision number)
 
 DataAccess - collect debug info (buffer misses, file size/total read operations) to find out a better strategy for buffer size detection. Compare performance.
 delta merge
-Changeset to get index (local revision number)
 RevisionWalker (on manifest) and WorkingCopyWalker (io.File) talking to ? and/or dirstate 
 RevlogStream - Inflater. Perhaps, InflaterStream instead?
 Implement use of fncache (use names from it - perhaps, would help for Mac issues Alex mentioned) along with 'digest'-ing long file names
--- a/src/com/tmate/hgkit/console/Log.java	Fri Jan 14 23:22:20 2011 +0100
+++ b/src/com/tmate/hgkit/console/Log.java	Sat Jan 15 01:15:38 2011 +0100
@@ -4,14 +4,18 @@
 package com.tmate.hgkit.console;
 
 import java.util.Formatter;
+import java.util.LinkedHashSet;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 import com.tmate.hgkit.fs.RepositoryLookup;
 import com.tmate.hgkit.ll.Changeset;
 import com.tmate.hgkit.ll.HgDataFile;
 import com.tmate.hgkit.ll.HgRepository;
 import com.tmate.hgkit.ll.Nodeid;
+import com.tmate.hgkit.ll.Revlog;
 
 /**
  * @author artem
@@ -28,13 +32,22 @@
 		}
 		System.out.println(hgRepo.getLocation());
 		final Dump dump = new Dump(hgRepo);
-		dump.complete = false; //cmdLineOpts;
+		dump.complete = true; //cmdLineOpts;
 		dump.reverseOrder = true;
+		dump.branches = cmdLineOpts.branches;
+		if (cmdLineOpts.users != null) {
+			dump.users = new LinkedHashSet<String>();
+			for (String u : cmdLineOpts.users) {
+				dump.users.add(u.toLowerCase());
+			}
+		}
 		if (cmdLineOpts.files.isEmpty()) {
-			// no revisions and no limit
 			if (cmdLineOpts.limit == -1) {
+				// no revisions and no limit
 				hgRepo.getChangelog().all(dump);
 			} else {
+				// in fact, external (to dump inspector) --limit processing yields incorrect results when other args,
+				// e.g. -u or -b, are used (i.e. with -u it shall give <limit> csets matching the user, not check the last <limit> csets for that user)
 				int[] r = new int[] { 0, hgRepo.getChangelog().getRevisionCount() };
 				if (fixRange(r, dump.reverseOrder, cmdLineOpts.limit) == 0) {
 					System.out.println("No changes");
@@ -61,11 +74,7 @@
 			}
 		}
 		//
-//		System.out.println("\n\n=========================");
-//		System.out.println("Range 1-3:");
-//		f1.history(1,3, callback);
-		//
-		//new ChangelogWalker().setFile("hello.c").setRevisionRange(1, 4).accept(new Visitor);
+		// XXX new ChangelogWalker().setFile("hello.c").setRevisionRange(1, 4).accept(new Visitor);
 	}
 	
 	private static int fixRange(int[] start_end, boolean reverse, int limit) {
@@ -83,19 +92,42 @@
 		return rv;
 	}
 
+	// Differences with standard hg log output
+	//   - complete == true (--debug) files are not broken down into modified, + and -
 	private static final class Dump implements Changeset.Inspector {
 		// params
 		boolean complete = false;
 		boolean reverseOrder = false;
+		Set<String> branches;
+		Set<String> users; // shall be lowercased
 		// own
 		private LinkedList<String> l = new LinkedList<String>();
 		private final HgRepository repo;
+		private Revlog.ParentWalker changelogWalker;
+		private final int tip;
 
 		public Dump(HgRepository hgRepo) {
-			this.repo = hgRepo;
+			repo = hgRepo;
+			tip = hgRepo.getChangelog().getRevisionCount() - 1;
 		}
 
 		public void next(int revisionNumber, Nodeid nodeid, Changeset cset) {
+			if (branches != null && !branches.contains(cset.branch())) {
+				return;
+			}
+			if (users != null) {
+				String csetUser = cset.user().toLowerCase();
+				boolean found = false;
+				for (String u : users) {
+					if (csetUser.indexOf(u) != -1) {
+						found = true;
+						break;
+					}
+				}
+				if (!found) {
+					return;
+				}
+			}
 			final String s = print(revisionNumber, nodeid, cset);
 			if (reverseOrder) {
 				l.addFirst(s);
@@ -112,14 +144,27 @@
 				System.out.print(s);
 			}
 			l.clear();
+			changelogWalker = null;
 		}
 
 		private String print(int revNumber, Nodeid csetNodeid, Changeset cset) {
 			StringBuilder sb = new StringBuilder();
 			Formatter f = new Formatter(sb);
 			f.format("changeset:   %d:%s\n", revNumber, complete ? csetNodeid : csetNodeid.shortNotation());
+			if (revNumber == tip) {
+				sb.append("tag:        tip\n");
+			}
 			if (complete) {
-				f.format("parent:      %s\nparent:      %s\nmanifest:    %s\n", "-1", "-1", cset.manifest());
+				if (changelogWalker == null) {
+					changelogWalker = repo.getChangelog().new ParentWalker();
+					changelogWalker.init();
+				}
+				Nodeid p1 = changelogWalker.safeFirstParent(csetNodeid);
+				Nodeid p2 = changelogWalker.safeSecondParent(csetNodeid);
+				int p1x = p1 == Nodeid.NULL ? -1 : repo.getChangelog().getLocalRevisionNumber(p1);
+				int p2x = p2 == Nodeid.NULL ? -1 : repo.getChangelog().getLocalRevisionNumber(p2);
+				int mx = repo.getManifest().getLocalRevisionNumber(cset.manifest());
+				f.format("parent:      %d:%s\nparent:      %d:%s\nmanifest:    %d:%s\n", p1x, p1, p2x, p2, mx, cset.manifest());
 			}
 			f.format("user:        %s\ndate:        %s\n", cset.user(), cset.dateString());
 			if (complete) {
@@ -129,13 +174,19 @@
 					sb.append(' ');
 					sb.append(s);
 				}
+				if (cset.extras() != null) {
+					sb.append("\nextra:      ");
+					for (Map.Entry<String, String> e : cset.extras().entrySet()) {
+						sb.append(' ');
+						sb.append(e.getKey());
+						sb.append('=');
+						sb.append(e.getValue());
+					}
+				}
 				f.format("\ndescription:\n%s\n\n", cset.comment());
 			} else {
 				f.format("summary:     %s\n\n", cset.comment());
 			}
-			if (cset.extras() != null) {
-				f.format("extra:    " + cset.extras()); // TODO
-			}
 			return sb.toString();
 		}
 	}
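
A note on the filtering Dump#next() now performs: -u values are lowercased once when the Dump is configured, and a changeset passes when its lowercased user string contains any of them as a substring (roughly what "hg log -u" does), while -b values are matched exactly against cset.branch(). A minimal sketch of the user check pulled into a helper; the method name and parameters are illustrative, not part of this changeset:

	// true when no -u filter is set, or when csetUser matches any requested user
	private static boolean matchesUser(String csetUser, Set<String> lowercasedUsers) {
		if (lowercasedUsers == null) {
			return true; // no -u option given, accept every changeset
		}
		final String u = csetUser.toLowerCase();
		for (String requested : lowercasedUsers) {
			if (u.indexOf(requested) != -1) {
				return true; // case-insensitive substring match, as in Dump#next()
			}
		}
		return false;
	}
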
--- a/src/com/tmate/hgkit/fs/RepositoryLookup.java	Fri Jan 14 23:22:20 2011 +0100
+++ b/src/com/tmate/hgkit/fs/RepositoryLookup.java	Sat Jan 15 01:15:38 2011 +0100
@@ -8,8 +8,10 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Iterator;
+import java.util.LinkedHashSet;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Set;
 
 import com.tmate.hgkit.ll.HgRepository;
 import com.tmate.hgkit.ll.LocalHgRepo;
@@ -57,6 +59,8 @@
 		public String repoLocation;
 		public List<String> files;
 		public int limit = -1;
+		public Set<String> users;
+		public Set<String> branches;
 
 		public static Options parse(String[] commandLineArgs) {
 			Options rv = new Options();
@@ -84,6 +88,20 @@
 							rv.limit = Integer.parseInt(it.next());
 							break;
 						}
+						case (int) 'u' : {
+							if (rv.users == null) {
+								rv.users = new LinkedHashSet<String>();
+							}
+							rv.users.add(it.next());
+							break;
+						}
+						case (int) 'b' : {
+							if (rv.branches == null) {
+								rv.branches = new LinkedHashSet<String>();
+							}
+							rv.branches.add(it.next());
+							break;
+						}
 					}
 				} else {
 					// filename
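
With the new 'u' and 'b' cases, repeated -u/-b options accumulate into LinkedHashSets, so duplicates collapse and first-occurrence order is preserved; Log then lowercases the user values before handing them to Dump. A hypothetical invocation of the parser, assuming the single-letter dispatch is driven by "-u"/"-b" as the surrounding code suggests (argument values are made up):

	// equivalent of: hg log -u artem -u alex -b default
	RepositoryLookup.Options opts = RepositoryLookup.Options.parse(
			new String[] { "-u", "artem", "-u", "alex", "-b", "default" });
	// opts.users    -> {"artem", "alex"}   (insertion order kept)
	// opts.branches -> {"default"}
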
--- a/src/com/tmate/hgkit/ll/Changeset.java	Fri Jan 14 23:22:20 2011 +0100
+++ b/src/com/tmate/hgkit/ll/Changeset.java	Sat Jan 15 01:15:38 2011 +0100
@@ -8,6 +8,7 @@
 import java.util.Collections;
 import java.util.Date;
 import java.util.Formatter;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
@@ -36,7 +37,7 @@
 	private List<String> files; // unmodifiable collection (otherwise #files() and implicit #clone() shall be revised)
 	private Date time;
 	private int timezone; // not sure it's of any use
-	private String extras; // TODO branch, etc.
+	private Map<String,String> extras;
 	
 	private Changeset() {
 	}
@@ -69,7 +70,11 @@
 	}
 
 	public Map<String, String> extras() {
-		return null; // TODO
+		return extras;
+	}
+	
+	public String branch() {
+		return extras.get("branch");
 	}
 	
 	@Override
@@ -129,6 +134,21 @@
 		// on commit and timezone is recorded to adjust it to UTC.
 		Date _time = new Date(unixTime * 1000);
 		String _extras = space2 < _timeString.length() ? _timeString.substring(space2+1) : null;
+		Map<String, String> _extrasMap;
+		if (_extras == null) {
+			_extrasMap = Collections.singletonMap("branch", "default");
+		} else {
+			_extrasMap = new HashMap<String, String>();
+			for (String pair : _extras.split("\00")) {
+				int eq = pair.indexOf('=');
+				// FIXME need to decode key/value, @see changelog.py:decodeextra
+				_extrasMap.put(pair.substring(0, eq), pair.substring(eq+1));
+			}
+			if (!_extrasMap.containsKey("branch")) {
+				_extrasMap.put("branch", "default");
+			}
+			_extrasMap = Collections.unmodifiableMap(_extrasMap);
+		}
 		
 		//
 		int lastStart = breakIndex3 + 1;
@@ -161,7 +181,7 @@
 		this.timezone = _timezone;
 		this.files = Collections.unmodifiableList(_files);
 		this.comment = _comment;
-		this.extras = _extras;
+		this.extras = _extrasMap;
 	}
 
 	private static int indexOf(byte[] src, byte what, int startOffset, int endIndex) {
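
The extras map now built here is what drives the new -b filter: branch() simply reads the "branch" key, with "default" injected when a changeset carries no extras or no branch entry. A tiny standalone illustration of the same \0-separated key=value decoding (the extras string is made up, and real entries may still need the unescaping the FIXME refers to):

	String _extras = "close=1\00transplant_source=0123456789abcdef"; // "\00" stands for NUL, the pair separator
	for (String pair : _extras.split("\00")) {
		int eq = pair.indexOf('=');
		System.out.println(pair.substring(0, eq) + " -> " + pair.substring(eq + 1));
	}
	// prints "close -> 1" and "transplant_source -> 0123456789abcdef";
	// the parsing above additionally puts "branch" -> "default" when no branch key is present
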
--- a/src/com/tmate/hgkit/ll/HgDataFile.java	Fri Jan 14 23:22:20 2011 +0100
+++ b/src/com/tmate/hgkit/ll/HgDataFile.java	Sat Jan 15 01:15:38 2011 +0100
@@ -33,8 +33,7 @@
 	}
 
 	public int length(Nodeid nodeid) {
-		int revision = content.findLocalRevisionNumber(nodeid);
-		return content.dataLength(revision);
+		return content.dataLength(getLocalRevisionNumber(nodeid));
 	}
 
 	public byte[] content() {
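
With length(Nodeid) delegating to getLocalRevisionNumber(), an unknown node now surfaces as the IllegalArgumentException thrown in Revlog rather than one thrown from RevlogStream. A minimal caller-side sketch, where df and nid stand in for some HgDataFile and Nodeid:

	try {
		int size = df.length(nid);
		// ... use size ...
	} catch (IllegalArgumentException ex) {
		// nid does not denote a revision of this file's revlog
	}
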
--- a/src/com/tmate/hgkit/ll/Revlog.java	Fri Jan 14 23:22:20 2011 +0100
+++ b/src/com/tmate/hgkit/ll/Revlog.java	Sat Jan 15 01:15:38 2011 +0100
@@ -35,23 +35,33 @@
 		return content.revisionCount();
 	}
 
+	public int getLocalRevisionNumber(Nodeid nid) {
+		int revision = content.findLocalRevisionNumber(nid);
+		if (revision == Integer.MIN_VALUE) {
+			throw new IllegalArgumentException(String.format("%s doesn't represent a revision of %s", nid.toString(), this /*XXX HgDataFile.getPath might be more suitable here*/));
+		}
+		return revision;
+	}
+
 	// Till now, I follow the approach that the NULL nodeid is never part of a revlog
 	public boolean isKnown(Nodeid nodeid) {
-		try {
-			int revision = content.findLocalRevisionNumber(nodeid);
-			return revision >= 0 && revision < getRevisionCount();
-		} catch (IllegalArgumentException ex) {
-			// FIXME bad way to figure out if nodeid is from this revlog
+		final int rn = content.findLocalRevisionNumber(nodeid);
+		if (Integer.MIN_VALUE == rn) {
 			return false;
 		}
+		if (rn < 0 || rn >= content.revisionCount()) {
+			// Sanity check
+			throw new IllegalStateException();
+		}
+		return true;
 	}
+
 	/**
 	 * Access to revision data as is (decompressed, but otherwise unprocessed, i.e. not parsed for e.g. changeset or manifest entries) 
 	 * @param nodeid
 	 */
 	public byte[] content(Nodeid nodeid) {
-		int revision = content.findLocalRevisionNumber(nodeid);
-		return content(revision);
+		return content(getLocalRevisionNumber(nodeid));
 	}
 	
 	/**
@@ -140,11 +150,22 @@
 		public Nodeid firstParent(Nodeid nid) {
 			return firstParent.get(nid);
 		}
+
+		// never null, Nodeid.NULL if none known
+		public Nodeid safeFirstParent(Nodeid nid) {
+			Nodeid rv = firstParent(nid);
+			return rv == null ? Nodeid.NULL : rv;
+		}
 		
 		public Nodeid secondParent(Nodeid nid) {
 			return secondParent.get(nid);
 		}
 
+		public Nodeid safeSecondParent(Nodeid nid) {
+			Nodeid rv = secondParent(nid);
+			return rv == null ? Nodeid.NULL : rv;
+		}
+
 		public boolean appendParentsOf(Nodeid nid, Collection<Nodeid> c) {
 			Nodeid p1 = firstParent(nid);
 			boolean modified = false;
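
The division of labour introduced here: RevlogStream#findLocalRevisionNumber reports a missing node with a sentinel, getLocalRevisionNumber() turns that into an IllegalArgumentException, and isKnown() turns it into a boolean. The safe*Parent helpers let callers render hg's "parent: -1:..." convention without null checks; a sketch mirroring the Dump code earlier in this changeset, with repo and csetNodeid as placeholders:

	Revlog.ParentWalker walker = repo.getChangelog().new ParentWalker();
	walker.init();
	Nodeid p1 = walker.safeFirstParent(csetNodeid); // Nodeid.NULL when no first parent is known
	int p1x = p1 == Nodeid.NULL ? -1 : repo.getChangelog().getLocalRevisionNumber(p1);
	// p1x is -1 for a root changeset, a valid local revision number otherwise
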
--- a/src/com/tmate/hgkit/ll/RevlogStream.java	Fri Jan 14 23:22:20 2011 +0100
+++ b/src/com/tmate/hgkit/ll/RevlogStream.java	Sat Jan 15 01:15:38 2011 +0100
@@ -73,7 +73,12 @@
 		}
 	}
 	
-	public int findLocalRevisionNumber(Nodeid nodeid) {
+	// Perhaps, RevlogStream should be limited to plain int revisions for access,
+	// while Nodeids should be kept one level up, in Revlog. Ideally, Revlog would keep
+	// a map of nodeids, and once it does, we may get rid of this method.
+	// Unlike its counterpart, Revlog#getLocalRevisionNumber, this method doesn't fail with an exception
+	// if the node is not found, but returns a predefined constant instead.
+	/*package-local*/ int findLocalRevisionNumber(Nodeid nodeid) {
 		// XXX this one may be implemented with iterate() once there's mechanism to stop iterations
 		final int indexSize = revisionCount();
 		DataAccess daIndex = getIndexStream();
@@ -95,7 +100,7 @@
 		} finally {
 			daIndex.done();
 		}
-		throw new IllegalArgumentException(String.format("%s doesn't represent a revision of %s", nodeid.toString(), indexFile.getName() /*XXX HgDataFile.getPath might be more suitable here*/));
+		return Integer.MIN_VALUE;
 	}
 
 
@@ -143,7 +148,9 @@
 			daIndex.seek(inline ? (int) index.get(i).offset : i * REVLOGV1_RECORD_SIZE);
 			for (; i <= end; i++ ) {
 				long l = daIndex.readLong();
+				@SuppressWarnings("unused")
 				long offset = l >>> 16;
+				@SuppressWarnings("unused")
 				int flags = (int) (l & 0X0FFFF);
 				int compressedLen = daIndex.readInt();
 				int actualLen = daIndex.readInt();
@@ -233,6 +240,7 @@
 			while(true) {
 				int compressedLen = da.readInt();
 				// 8+4 = 12 bytes total read here
+				@SuppressWarnings("unused")
 				int actualLen = da.readInt();
 				int baseRevision = da.readInt();
 				// 12 + 8 = 20 bytes read here
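
For context on the iterate() hunk above: the first 8 bytes of a revlogv1 index entry pack a 6-byte data offset together with 2 bytes of flags, hence the ">>> 16" / "& 0xFFFF" split; both values are currently unused, which is what the @SuppressWarnings additions acknowledge. A tiny worked example with a made-up record value:

	long l = 0x0000000123450001L;    // hypothetical first 8 bytes of an index record
	long offset = l >>> 16;          // 0x12345 -> data offset 74565
	int flags = (int) (l & 0x0FFFF); // 0x0001  -> low 16 bits are the flags
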