# HG changeset patch # User Artem Tikhomirov # Date 1360099821 -3600 # Node ID dd4f6311af52496914fdef4a151fd5abc1140d8d # Parent 5a455624be4f147c11d3860282f2067690dd12bd Commit: first working version diff -r 5a455624be4f -r dd4f6311af52 src/org/tmatesoft/hg/internal/ChangelogEntryBuilder.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/ChangelogEntryBuilder.java Tue Feb 05 22:30:21 2013 +0100 @@ -0,0 +1,138 @@ +/* + * Copyright (c) 2012 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.TimeZone; +import java.util.Map.Entry; + +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.util.Path; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class ChangelogEntryBuilder { + + private String user; + private List modifiedFiles; + private final Map extrasMap = new LinkedHashMap(); + private Integer tzOffset; + private Long csetTime; + + public ChangelogEntryBuilder user(String username) { + user = username; + return this; + } + + public String user() { + if (user == null) { + // for our testing purposes anything but null is ok. no reason to follow Hg username lookup conventions + user = System.getProperty("user.name"); + } + return user; + } + + public ChangelogEntryBuilder setModified(Collection files) { + modifiedFiles = new ArrayList(files == null ? 
Collections.emptyList() : files); + return this; + } + + public ChangelogEntryBuilder addModified(Collection files) { + if (modifiedFiles == null) { + return setModified(files); + } + modifiedFiles.addAll(files); + return this; + } + + public ChangelogEntryBuilder branch(String branchName) { + if (branchName == null || "default".equals(branchName)) { + extrasMap.remove("branch"); + } else { + extrasMap.put("branch", branchName); + } + return this; + } + + public ChangelogEntryBuilder extras(Map extras) { + extrasMap.clear(); + extrasMap.putAll(extras); + return this; + } + + public ChangelogEntryBuilder date(long seconds, int timezoneOffset) { + csetTime = seconds; + tzOffset = timezoneOffset; + return this; + } + + private long csetTime() { + if (csetTime != null) { + return csetTime; + } + return System.currentTimeMillis() / 1000; + } + + private int csetTimezone(long time) { + if (tzOffset != null) { + return tzOffset; + } + return -(TimeZone.getDefault().getOffset(time) / 1000); + } + + public byte[] build(Nodeid manifestRevision, String comment) { + String f = "%s\n%s\n%d %d %s\n%s\n\n%s"; + StringBuilder extras = new StringBuilder(); + for (Iterator> it = extrasMap.entrySet().iterator(); it.hasNext();) { + final Entry next = it.next(); + extras.append(encodeExtrasPair(next.getKey())); + extras.append(':'); + extras.append(encodeExtrasPair(next.getValue())); + if (it.hasNext()) { + extras.append('\00'); + } + } + StringBuilder files = new StringBuilder(); + if (modifiedFiles != null) { + for (Iterator it = modifiedFiles.iterator(); it.hasNext(); ) { + files.append(it.next()); + if (it.hasNext()) { + files.append('\n'); + } + } + } + final long date = csetTime(); + final int tz = csetTimezone(date); + return String.format(f, manifestRevision.toString(), user(), date, tz, extras, files, comment).getBytes(); + } + + private final static CharSequence encodeExtrasPair(String s) { + if (s != null) { + return s.replace("\\", "\\\\").replace("\n", "\\n").replace("\r", "\\r").replace("\00", "\\0"); + } + return s; + } +} diff -r 5a455624be4f -r dd4f6311af52 src/org/tmatesoft/hg/internal/DataAccessProvider.java --- a/src/org/tmatesoft/hg/internal/DataAccessProvider.java Tue Feb 05 20:06:22 2013 +0100 +++ b/src/org/tmatesoft/hg/internal/DataAccessProvider.java Tue Feb 05 22:30:21 2013 +0100 @@ -105,7 +105,7 @@ return new DataSerializer(); } try { - return new StreamDataSerializer(context.getLog(), new FileOutputStream(f)); + return new StreamDataSerializer(context.getLog(), new FileOutputStream(f, true)); } catch (final FileNotFoundException ex) { context.getLog().dump(getClass(), Error, ex, null); return new DataSerializer() { diff -r 5a455624be4f -r dd4f6311af52 src/org/tmatesoft/hg/internal/ManifestEntryBuilder.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/ManifestEntryBuilder.java Tue Feb 05 22:30:21 2013 +0100 @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2012 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import java.io.ByteArrayOutputStream;
+
+import org.tmatesoft.hg.core.Nodeid;
+
+/**
+ * Create binary manifest entry ready to write down into 00manifest.i
+ * <pre>
+ * Usage:
+ *   ManifestEntryBuilder mb = new ManifestEntryBuilder();
+ *   mb.reset().add("file1", file1.getRevision(r1));
+ *   mb.add("file2", file2.getRevision(r2));
+ *   byte[] manifestRecordData = mb.build();
+ *   byte[] manifestRevlogHeader = buildRevlogHeader(..., sha1(parents, manifestRecordData), manifestRecordData.length);
+ *   manifestIndexOutputStream.write(manifestRevlogHeader);
+ *   manifestIndexOutputStream.write(manifestRecordData);
+ * </pre>
+ * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class ManifestEntryBuilder { + private ByteArrayOutputStream buffer = new ByteArrayOutputStream(); + + + public ManifestEntryBuilder reset() { + buffer.reset(); + return this; + } + public ManifestEntryBuilder add(String fname, Nodeid revision) { + byte[] b = fname.getBytes(); + buffer.write(b, 0, b.length); + buffer.write('\0'); + b = revision.toString().getBytes(); + buffer.write(b, 0, b.length); + buffer.write('\n'); + return this; + } + + public byte[] build() { + return buffer.toByteArray(); + } + +} diff -r 5a455624be4f -r dd4f6311af52 src/org/tmatesoft/hg/internal/ManifestRevision.java --- a/src/org/tmatesoft/hg/internal/ManifestRevision.java Tue Feb 05 20:06:22 2013 +0100 +++ b/src/org/tmatesoft/hg/internal/ManifestRevision.java Tue Feb 05 22:30:21 2013 +0100 @@ -16,6 +16,8 @@ */ package org.tmatesoft.hg.internal; +import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION; + import java.util.Collection; import java.util.TreeMap; @@ -36,8 +38,8 @@ private final TreeMap flagsMap; private final Convertor idsPool; private final Convertor namesPool; - private Nodeid manifestRev; - private int changelogRevIndex, manifestRevIndex; + private Nodeid manifestRev = Nodeid.NULL; + private int changelogRevIndex = NO_REVISION, manifestRevIndex = NO_REVISION; // optional pools for effective management of nodeids and filenames (they are likely // to be duplicated among different manifest revisions diff -r 5a455624be4f -r dd4f6311af52 src/org/tmatesoft/hg/internal/PatchGenerator.java --- a/src/org/tmatesoft/hg/internal/PatchGenerator.java Tue Feb 05 20:06:22 2013 +0100 +++ b/src/org/tmatesoft/hg/internal/PatchGenerator.java Tue Feb 05 22:30:21 2013 +0100 @@ -25,7 +25,7 @@ import org.tmatesoft.hg.repo.HgRepository; /** - * Mercurial cares about changes only up to the line level, e.g. a simple file version bump in manifest looks like (RevlogDump output): + * Mercurial cares about changes only up to the line level, e.g. a simple file version dump in manifest looks like (RevlogDump output): * * 522: 233748 0 103 17438 433 522 521 -1 756073cf2321df44d3ed0585f2a5754bc8a1b2f6 * : @@ -177,6 +177,12 @@ } public static void main(String[] args) throws Exception { + PatchGenerator pg1 = new PatchGenerator(); + pg1.init("hello".getBytes(), "hello\nworld".getBytes()); + pg1.findMatchingBlocks(); + if (Boolean.TRUE.booleanValue()) { + return; + } HgRepository repo = new HgLookup().detectFromWorkingDir(); HgDataFile df = repo.getFileNode("cmdline/org/tmatesoft/hg/console/Main.java"); ByteArrayChannel bac1, bac2; @@ -223,6 +229,8 @@ if (lastStart < input.length) { lines.add(new ByteChain(lastStart, input.length)); } + // empty chunk to keep offset of input end + lines.add(new ByteChain(input.length, input.length)); } public ByteChain chunk(int index) { diff -r 5a455624be4f -r dd4f6311af52 src/org/tmatesoft/hg/internal/RevlogStream.java --- a/src/org/tmatesoft/hg/internal/RevlogStream.java Tue Feb 05 20:06:22 2013 +0100 +++ b/src/org/tmatesoft/hg/internal/RevlogStream.java Tue Feb 05 22:30:21 2013 +0100 @@ -223,6 +223,27 @@ } return BAD_REVISION; } + + public long newEntryOffset() { + if (revisionCount() == 0) { + return 0; + } + DataAccess daIndex = getIndexStream(); + int lastRev = revisionCount() - 1; + try { + int recordOffset = getIndexOffsetInt(lastRev); + daIndex.seek(recordOffset); + long value = daIndex.readLong(); + value = value >>> 16; + int compressedLen = daIndex.readInt(); + return lastRev == 0 ? 
compressedLen : value + compressedLen; + } catch (IOException ex) { + throw new HgInvalidControlFileException("Linked revision lookup failed", ex, indexFile).setRevisionIndex(lastRev); + } finally { + daIndex.done(); + } + } + // should be possible to use TIP, ALL, or -1, -2, -n notation of Hg diff -r 5a455624be4f -r dd4f6311af52 src/org/tmatesoft/hg/internal/RevlogStreamWriter.java --- a/src/org/tmatesoft/hg/internal/RevlogStreamWriter.java Tue Feb 05 20:06:22 2013 +0100 +++ b/src/org/tmatesoft/hg/internal/RevlogStreamWriter.java Tue Feb 05 22:30:21 2013 +0100 @@ -36,7 +36,157 @@ */ public class RevlogStreamWriter { + private final DigestHelper dh = new DigestHelper(); + private final RevlogCompressor revlogDataZip; + private int lastEntryBase, lastEntryIndex; + private byte[] lastEntryContent; + private Nodeid lastEntryRevision; + private IntMap revisionCache = new IntMap(32); + private RevlogStream revlogStream; + public RevlogStreamWriter(SessionContext ctx, RevlogStream stream) { + assert ctx != null; + assert stream != null; + + revlogDataZip = new RevlogCompressor(ctx); + revlogStream = stream; + } + + /** + * @return nodeid of added revision + */ + public Nodeid addRevision(byte[] content, int linkRevision, int p1, int p2) { + lastEntryRevision = Nodeid.NULL; + int revCount = revlogStream.revisionCount(); + lastEntryIndex = revCount == 0 ? NO_REVISION : revCount - 1; + populateLastEntry(); + // + PatchGenerator pg = new PatchGenerator(); + Patch patch = pg.delta(lastEntryContent, content); + int patchSerializedLength = patch.serializedLength(); + + final boolean writeComplete = preferCompleteOverPatch(patchSerializedLength, content.length); + DataSerializer.DataSource dataSource = writeComplete ? new DataSerializer.ByteArrayDataSource(content) : patch.new PatchDataSource(); + revlogDataZip.reset(dataSource); + final int compressedLen; + final boolean useCompressedData = preferCompressedOverComplete(revlogDataZip.getCompressedLength(), dataSource.serializeLength()); + if (useCompressedData) { + compressedLen= revlogDataZip.getCompressedLength(); + } else { + // compression wasn't too effective, + compressedLen = dataSource.serializeLength() + 1 /*1 byte for 'u' - uncompressed prefix byte*/; + } + // + Nodeid p1Rev = revision(p1); + Nodeid p2Rev = revision(p2); + byte[] revisionNodeidBytes = dh.sha1(p1Rev, p2Rev, content).asBinary(); + // + + DataSerializer indexFile, dataFile, activeFile; + indexFile = dataFile = activeFile = null; + try { + // + activeFile = indexFile = revlogStream.getIndexStreamWriter(); + final boolean isInlineData = revlogStream.isInlineData(); + HeaderWriter revlogHeader = new HeaderWriter(isInlineData); + revlogHeader.length(content.length, compressedLen); + revlogHeader.nodeid(revisionNodeidBytes); + revlogHeader.linkRevision(linkRevision); + revlogHeader.parents(p1, p2); + revlogHeader.baseRevision(writeComplete ? 
lastEntryIndex+1 : lastEntryBase); + revlogHeader.offset(revlogStream.newEntryOffset()); + // + revlogHeader.serialize(indexFile); + + if (isInlineData) { + dataFile = indexFile; + } else { + dataFile = revlogStream.getDataStreamWriter(); + } + activeFile = dataFile; + if (useCompressedData) { + int actualCompressedLenWritten = revlogDataZip.writeCompressedData(dataFile); + if (actualCompressedLenWritten != compressedLen) { + throw new HgInvalidStateException(String.format("Expected %d bytes of compressed data, but actually wrote %d in %s", compressedLen, actualCompressedLenWritten, revlogStream.getDataFileName())); + } + } else { + dataFile.writeByte((byte) 'u'); + dataSource.serialize(dataFile); + } + + lastEntryContent = content; + lastEntryBase = revlogHeader.baseRevision(); + lastEntryIndex++; + lastEntryRevision = Nodeid.fromBinary(revisionNodeidBytes, 0); + revisionCache.put(lastEntryIndex, lastEntryRevision); + } catch (IOException ex) { + String m = String.format("Failed to write revision %d", lastEntryIndex+1, null); + HgInvalidControlFileException t = new HgInvalidControlFileException(m, ex, null); + if (activeFile == dataFile) { + throw revlogStream.initWithDataFile(t); + } else { + throw revlogStream.initWithIndexFile(t); + } + } finally { + indexFile.done(); + if (dataFile != null && dataFile != indexFile) { + dataFile.done(); + } + } + return lastEntryRevision; + } + + private Nodeid revision(int revisionIndex) { + if (revisionIndex == NO_REVISION) { + return Nodeid.NULL; + } + Nodeid n = revisionCache.get(revisionIndex); + if (n == null) { + n = Nodeid.fromBinary(revlogStream.nodeid(revisionIndex), 0); + revisionCache.put(revisionIndex, n); + } + return n; + } + + private void populateLastEntry() throws HgInvalidControlFileException { + if (lastEntryIndex != NO_REVISION && lastEntryContent == null) { + assert lastEntryIndex >= 0; + final IOException[] failure = new IOException[1]; + revlogStream.iterate(lastEntryIndex, lastEntryIndex, true, new RevlogStream.Inspector() { + + public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) { + try { + lastEntryBase = baseRevision; + lastEntryRevision = Nodeid.fromBinary(nodeid, 0); + lastEntryContent = data.byteArray(); + } catch (IOException ex) { + failure[0] = ex; + } + } + }); + if (failure[0] != null) { + String m = String.format("Failed to get content of most recent revision %d", lastEntryIndex); + throw revlogStream.initWithDataFile(new HgInvalidControlFileException(m, failure[0], null)); + } + } + } + + public static boolean preferCompleteOverPatch(int patchLength, int fullContentLength) { + return !decideWorthEffort(patchLength, fullContentLength); + } + + public static boolean preferCompressedOverComplete(int compressedLen, int fullContentLength) { + if (compressedLen <= 0) { // just in case, meaningless otherwise + return false; + } + return decideWorthEffort(compressedLen, fullContentLength); + } + + // true if length obtained with effort is worth it + private static boolean decideWorthEffort(int lengthWithExtraEffort, int lengthWithoutEffort) { + return lengthWithExtraEffort < (/* 3/4 of original */lengthWithoutEffort - (lengthWithoutEffort >>> 2)); + } + /*XXX public because HgCloneCommand uses it*/ public static class HeaderWriter implements DataSerializer.DataSource { private final ByteBuffer header; @@ -125,147 +275,4 @@ return header.capacity(); } } - - private final DigestHelper dh = new DigestHelper(); - 
private final RevlogCompressor revlogDataZip; - - - public RevlogStreamWriter(SessionContext ctx, RevlogStream stream) { - revlogDataZip = new RevlogCompressor(ctx); - } - - private int lastEntryBase, lastEntryIndex; - private byte[] lastEntryContent; - private Nodeid lastEntryRevision; - private IntMap revisionCache = new IntMap(32); - - public void addRevision(byte[] content, int linkRevision, int p1, int p2) { - int revCount = revlogStream.revisionCount(); - lastEntryIndex = revCount == 0 ? NO_REVISION : revCount - 1; - populateLastEntry(); - // - PatchGenerator pg = new PatchGenerator(); - Patch patch = pg.delta(lastEntryContent, content); - int patchSerializedLength = patch.serializedLength(); - - final boolean writeComplete = preferCompleteOverPatch(patchSerializedLength, content.length); - DataSerializer.DataSource dataSource = writeComplete ? new DataSerializer.ByteArrayDataSource(content) : patch.new PatchDataSource(); - revlogDataZip.reset(dataSource); - final int compressedLen; - final boolean useUncompressedData = preferCompressedOverComplete(revlogDataZip.getCompressedLength(), dataSource.serializeLength()); - if (useUncompressedData) { - // compression wasn't too effective, - compressedLen = dataSource.serializeLength() + 1 /*1 byte for 'u' - uncompressed prefix byte*/; - } else { - compressedLen= revlogDataZip.getCompressedLength(); - } - // - Nodeid p1Rev = revision(p1); - Nodeid p2Rev = revision(p2); - byte[] revisionNodeidBytes = dh.sha1(p1Rev, p2Rev, content).asBinary(); - // - - DataSerializer indexFile, dataFile, activeFile; - indexFile = dataFile = activeFile = null; - try { - // - activeFile = indexFile = revlogStream.getIndexStreamWriter(); - final boolean isInlineData = revlogStream.isInlineData(); - HeaderWriter revlogHeader = new HeaderWriter(isInlineData); - revlogHeader.length(content.length, compressedLen); - revlogHeader.nodeid(revisionNodeidBytes); - revlogHeader.linkRevision(linkRevision); - revlogHeader.parents(p1, p2); - revlogHeader.baseRevision(writeComplete ? 
lastEntryIndex+1 : lastEntryBase); - // - revlogHeader.serialize(indexFile); - - if (isInlineData) { - dataFile = indexFile; - } else { - dataFile = revlogStream.getDataStreamWriter(); - } - activeFile = dataFile; - if (useUncompressedData) { - dataFile.writeByte((byte) 'u'); - dataSource.serialize(dataFile); - } else { - int actualCompressedLenWritten = revlogDataZip.writeCompressedData(dataFile); - if (actualCompressedLenWritten != compressedLen) { - throw new HgInvalidStateException(String.format("Expected %d bytes of compressed data, but actually wrote %d in %s", compressedLen, actualCompressedLenWritten, revlogStream.getDataFileName())); - } - } - - lastEntryContent = content; - lastEntryBase = revlogHeader.baseRevision(); - lastEntryIndex++; - lastEntryRevision = Nodeid.fromBinary(revisionNodeidBytes, 0); - revisionCache.put(lastEntryIndex, lastEntryRevision); - } catch (IOException ex) { - String m = String.format("Failed to write revision %d", lastEntryIndex+1, null); - HgInvalidControlFileException t = new HgInvalidControlFileException(m, ex, null); - if (activeFile == dataFile) { - throw revlogStream.initWithDataFile(t); - } else { - throw revlogStream.initWithIndexFile(t); - } - } finally { - indexFile.done(); - if (dataFile != null && dataFile != indexFile) { - dataFile.done(); - } - } - } - - private RevlogStream revlogStream; - private Nodeid revision(int revisionIndex) { - if (revisionIndex == NO_REVISION) { - return Nodeid.NULL; - } - Nodeid n = revisionCache.get(revisionIndex); - if (n == null) { - n = Nodeid.fromBinary(revlogStream.nodeid(revisionIndex), 0); - revisionCache.put(revisionIndex, n); - } - return n; - } - - private void populateLastEntry() throws HgInvalidControlFileException { - if (lastEntryIndex != NO_REVISION && lastEntryContent == null) { - assert lastEntryIndex >= 0; - final IOException[] failure = new IOException[1]; - revlogStream.iterate(lastEntryIndex, lastEntryIndex, true, new RevlogStream.Inspector() { - - public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) { - try { - lastEntryBase = baseRevision; - lastEntryRevision = Nodeid.fromBinary(nodeid, 0); - lastEntryContent = data.byteArray(); - } catch (IOException ex) { - failure[0] = ex; - } - } - }); - if (failure[0] != null) { - String m = String.format("Failed to get content of most recent revision %d", lastEntryIndex); - throw revlogStream.initWithDataFile(new HgInvalidControlFileException(m, failure[0], null)); - } - } - } - - public static boolean preferCompleteOverPatch(int patchLength, int fullContentLength) { - return !decideWorthEffort(patchLength, fullContentLength); - } - - public static boolean preferCompressedOverComplete(int compressedLen, int fullContentLength) { - if (compressedLen <= 0) { // just in case, meaningless otherwise - return false; - } - return decideWorthEffort(compressedLen, fullContentLength); - } - - // true if length obtained with effort is worth it - private static boolean decideWorthEffort(int lengthWithExtraEffort, int lengthWithoutEffort) { - return lengthWithExtraEffort < (/* 3/4 of original */lengthWithoutEffort - (lengthWithoutEffort >>> 2)); - } } diff -r 5a455624be4f -r dd4f6311af52 src/org/tmatesoft/hg/repo/CommitFacility.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/CommitFacility.java Tue Feb 05 22:30:21 2013 +0100 @@ -0,0 +1,162 @@ +/* + * Copyright (c) 2013 TMate Software Ltd + * + * This program is free 
software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.repo; + +import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION; + +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.TreeMap; + +import org.tmatesoft.hg.core.HgRepositoryLockException; +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.internal.ByteArrayChannel; +import org.tmatesoft.hg.internal.ChangelogEntryBuilder; +import org.tmatesoft.hg.internal.Experimental; +import org.tmatesoft.hg.internal.ManifestEntryBuilder; +import org.tmatesoft.hg.internal.ManifestRevision; +import org.tmatesoft.hg.internal.RevlogStreamWriter; +import org.tmatesoft.hg.util.Pair; +import org.tmatesoft.hg.util.Path; + +/** + * WORK IN PROGRESS + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +@Experimental(reason="Work in progress") +public class CommitFacility { + private final HgRepository repo; + private final int p1Commit, p2Commit; + private Map> files = new LinkedHashMap>(); + + + public CommitFacility(HgRepository hgRepo, int parentCommit) { + this(hgRepo, parentCommit, NO_REVISION); + } + + public CommitFacility(HgRepository hgRepo, int parent1Commit, int parent2Commit) { + repo = hgRepo; + p1Commit = parent1Commit; + p2Commit = parent2Commit; + if (parent1Commit != NO_REVISION && parent1Commit == parent2Commit) { + throw new IllegalArgumentException("Merging same revision is dubious"); + } + } + + public boolean isMerge() { + return p1Commit != NO_REVISION && p2Commit != NO_REVISION; + } + + public void add(HgDataFile dataFile, ByteDataSupplier content) { + files.put(dataFile.getPath(), new Pair(dataFile, content)); + } + + public Nodeid commit(String message) throws HgRepositoryLockException { + + final HgChangelog clog = repo.getChangelog(); + final int clogRevisionIndex = clog.getRevisionCount(); + ManifestRevision c1Manifest = new ManifestRevision(null, null); + ManifestRevision c2Manifest = new ManifestRevision(null, null); + if (p1Commit != NO_REVISION) { + repo.getManifest().walk(p1Commit, p1Commit, c1Manifest); + } + if (p2Commit != NO_REVISION) { + repo.getManifest().walk(p2Commit, p2Commit, c2Manifest); + } +// Pair manifestParents = getManifestParents(); + Pair manifestParents = new Pair(c1Manifest.revisionIndex(), c2Manifest.revisionIndex()); + TreeMap newManifestRevision = new TreeMap(); + HashMap> fileParents = new HashMap>(); + for (Path f : c1Manifest.files()) { + HgDataFile df = repo.getFileNode(f); + Nodeid fileKnownRev = c1Manifest.nodeid(f); + int fileRevIndex = df.getRevisionIndex(fileKnownRev); + // FIXME merged files?! 
+ fileParents.put(f, new Pair(fileRevIndex, NO_REVISION)); + newManifestRevision.put(f, fileKnownRev); + } + // + // Files + for (Pair e : files.values()) { + HgDataFile df = e.first(); + Pair fp = fileParents.get(df.getPath()); + if (fp == null) { + // NEW FILE + fp = new Pair(NO_REVISION, NO_REVISION); + } + ByteDataSupplier bds = e.second(); + // FIXME quickfix, instead, pass ByteDataSupplier directly to RevlogStreamWriter + ByteBuffer bb = ByteBuffer.allocate(2048); + ByteArrayChannel bac = new ByteArrayChannel(); + while (bds.read(bb) != -1) { + bb.flip(); + bac.write(bb); + bb.clear(); + } + RevlogStreamWriter fileWriter = new RevlogStreamWriter(repo.getSessionContext(), df.content); + Nodeid fileRev = fileWriter.addRevision(bac.toArray(), clogRevisionIndex, fp.first(), fp.second()); + newManifestRevision.put(df.getPath(), fileRev); + } + // + // Manifest + final ManifestEntryBuilder manifestBuilder = new ManifestEntryBuilder(); + for (Map.Entry me : newManifestRevision.entrySet()) { + manifestBuilder.add(me.getKey().toString(), me.getValue()); + } + RevlogStreamWriter manifestWriter = new RevlogStreamWriter(repo.getSessionContext(), repo.getManifest().content); + Nodeid manifestRev = manifestWriter.addRevision(manifestBuilder.build(), clogRevisionIndex, manifestParents.first(), manifestParents.second()); + // + // Changelog + final ChangelogEntryBuilder changelogBuilder = new ChangelogEntryBuilder(); + changelogBuilder.setModified(files.keySet()); + byte[] clogContent = changelogBuilder.build(manifestRev, message); + RevlogStreamWriter changelogWriter = new RevlogStreamWriter(repo.getSessionContext(), clog.content); + Nodeid changesetRev = changelogWriter.addRevision(clogContent, clogRevisionIndex, p1Commit, p2Commit); + return changesetRev; + } +/* + private Pair getManifestParents() { + return new Pair(extractManifestRevisionIndex(p1Commit), extractManifestRevisionIndex(p2Commit)); + } + + private int extractManifestRevisionIndex(int clogRevIndex) { + if (clogRevIndex == NO_REVISION) { + return NO_REVISION; + } + RawChangeset commitObject = repo.getChangelog().range(clogRevIndex, clogRevIndex).get(0); + Nodeid manifestRev = commitObject.manifest(); + if (manifestRev.isNull()) { + return NO_REVISION; + } + return repo.getManifest().getRevisionIndex(manifestRev); + } +*/ + + // unlike DataAccess (which provides structured access), this one + // deals with a sequence of bytes, when there's no need in structure of the data + public interface ByteDataSupplier { // TODO look if can resolve DataAccess in HgCloneCommand visibility issue + int read(ByteBuffer buf); + } + + public interface ByteDataConsumer { + void write(ByteBuffer buf); + } +} diff -r 5a455624be4f -r dd4f6311af52 src/org/tmatesoft/hg/repo/Revlog.java --- a/src/org/tmatesoft/hg/repo/Revlog.java Tue Feb 05 20:06:22 2013 +0100 +++ b/src/org/tmatesoft/hg/repo/Revlog.java Tue Feb 05 22:30:21 2013 +0100 @@ -392,7 +392,7 @@ pw.init(); return pw; } - + /* * class with cancel and few other exceptions support. TODO consider general superclass to share with e.g. 
HgManifestCommand.Mediator */ diff -r 5a455624be4f -r dd4f6311af52 test/org/tmatesoft/hg/test/TestCommit.java --- a/test/org/tmatesoft/hg/test/TestCommit.java Tue Feb 05 20:06:22 2013 +0100 +++ b/test/org/tmatesoft/hg/test/TestCommit.java Tue Feb 05 22:30:21 2013 +0100 @@ -16,8 +16,14 @@ */ package org.tmatesoft.hg.test; -import org.junit.Assert; +import java.io.File; +import java.io.FileWriter; +import java.nio.ByteBuffer; + import org.junit.Test; +import org.tmatesoft.hg.repo.CommitFacility; +import org.tmatesoft.hg.repo.HgLookup; +import org.tmatesoft.hg.repo.HgRepository; /** * @@ -27,7 +33,61 @@ public class TestCommit { @Test - public void testCommitToEmpty() throws Exception { - Assert.fail(); + public void testCommitToNonEmpty() throws Exception { + File repoLoc = RepoUtils.initEmptyTempRepo("test-commit2non-empty"); + FileWriter fw = new FileWriter(new File(repoLoc, "file1")); + fw.write("hello"); + fw.close(); + new ExecHelper(new OutputParser.Stub(true), repoLoc).run("hg", "commit", "--addremove", "-m", "FIRST"); + // + HgRepository hgRepo = new HgLookup().detect(repoLoc); + CommitFacility cf = new CommitFacility(hgRepo, 0 /*NO_REVISION*/); + // FIXME test diff for processing changed newlines - if a whole line or just changed endings are in the patch! + cf.add(hgRepo.getFileNode("file1"), new ByteArraySupplier("hello\nworld".getBytes())); + cf.commit("commit 1"); + // /tmp/test-commit2non-empty/.hg/ store/data/file1.i dumpData + } + + public static void main(String[] args) throws Exception { + new TestCommit().testCommitToNonEmpty(); + String input = "abcdefghijklmnopqrstuvwxyz"; + ByteArraySupplier bas = new ByteArraySupplier(input.getBytes()); + ByteBuffer bb = ByteBuffer.allocate(7); + byte[] result = new byte[26]; + int rpos = 0; + while (bas.read(bb) != -1) { + bb.flip(); + bb.get(result, rpos, bb.limit()); + rpos += bb.limit(); + bb.clear(); + } + if (input.length() != rpos) { + throw new AssertionError(); + } + String output = new String(result); + if (!input.equals(output)) { + throw new AssertionError(); + } + System.out.println(output); + } + + static class ByteArraySupplier implements CommitFacility.ByteDataSupplier { + + private final byte[] data; + private int pos = 0; + + public ByteArraySupplier(byte[] source) { + data = source; + } + + public int read(ByteBuffer buf) { + if (pos >= data.length) { + return -1; + } + int count = Math.min(buf.remaining(), data.length - pos); + buf.put(data, pos, count); + pos += count; + return count; + } } } diff -r 5a455624be4f -r dd4f6311af52 test/org/tmatesoft/hg/tools/ChangelogEntryBuilder.java --- a/test/org/tmatesoft/hg/tools/ChangelogEntryBuilder.java Tue Feb 05 20:06:22 2013 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,136 +0,0 @@ -/* - * Copyright (c) 2012 TMate Software Ltd - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; version 2 of the License. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. 
- * - * For information on how to redistribute this software under - * the terms of a license other than GNU General Public License - * contact TMate Software at support@hg4j.com - */ -package org.tmatesoft.hg.tools; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.TimeZone; -import java.util.Map.Entry; - -import org.tmatesoft.hg.core.Nodeid; - -/** - * - * @author Artem Tikhomirov - * @author TMate Software Ltd. - */ -public class ChangelogEntryBuilder { - - private String user; - private List modifiedFiles; - private final Map extrasMap = new LinkedHashMap(); - private Integer tzOffset; - private Long csetTime; - - public ChangelogEntryBuilder user(String username) { - user = username; - return this; - } - - public String user() { - if (user == null) { - // for our testing purposes anything but null is ok. no reason to follow Hg username lookup conventions - user = System.getProperty("user.name"); - } - return user; - } - - public ChangelogEntryBuilder setModified(List files) { - modifiedFiles = new ArrayList(files == null ? Collections.emptyList() : files); - return this; - } - - public ChangelogEntryBuilder addModified(List files) { - if (modifiedFiles == null) { - return setModified(files); - } - modifiedFiles.addAll(files); - return this; - } - - public ChangelogEntryBuilder branch(String branchName) { - if (branchName == null || "default".equals(branchName)) { - extrasMap.remove("branch"); - } else { - extrasMap.put("branch", branchName); - } - return this; - } - - public ChangelogEntryBuilder extras(Map extras) { - extrasMap.clear(); - extrasMap.putAll(extras); - return this; - } - - public ChangelogEntryBuilder date(long seconds, int timezoneOffset) { - csetTime = seconds; - tzOffset = timezoneOffset; - return this; - } - - private long csetTime() { - if (csetTime != null) { - return csetTime; - } - return System.currentTimeMillis() / 1000; - } - - private int csetTimezone(long time) { - if (tzOffset != null) { - return tzOffset; - } - return -(TimeZone.getDefault().getOffset(time) / 1000); - } - - public byte[] build(Nodeid manifestRevision, String comment) { - String f = "%s\n%s\n%d %d %s\n%s\n\n%s"; - StringBuilder extras = new StringBuilder(); - for (Iterator> it = extrasMap.entrySet().iterator(); it.hasNext();) { - final Entry next = it.next(); - extras.append(encodeExtrasPair(next.getKey())); - extras.append(':'); - extras.append(encodeExtrasPair(next.getValue())); - if (it.hasNext()) { - extras.append('\00'); - } - } - StringBuilder files = new StringBuilder(); - if (modifiedFiles != null) { - for (Iterator it = modifiedFiles.iterator(); it.hasNext(); ) { - files.append(it.next()); - if (it.hasNext()) { - files.append('\n'); - } - } - } - final long date = csetTime(); - final int tz = csetTimezone(date); - return String.format(f, manifestRevision.toString(), user(), date, tz, extras, files, comment).getBytes(); - } - - private final static CharSequence encodeExtrasPair(String s) { - if (s != null) { - return s.replace("\\", "\\\\").replace("\n", "\\n").replace("\r", "\\r").replace("\00", "\\0"); - } - return s; - } -} diff -r 5a455624be4f -r dd4f6311af52 test/org/tmatesoft/hg/tools/ManifestEntryBuilder.java --- a/test/org/tmatesoft/hg/tools/ManifestEntryBuilder.java Tue Feb 05 20:06:22 2013 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2012 TMate Software Ltd - * - * This program is free 
software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; version 2 of the License.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * For information on how to redistribute this software under
- * the terms of a license other than GNU General Public License
- * contact TMate Software at support@hg4j.com
- */
-package org.tmatesoft.hg.tools;
-
-import java.io.ByteArrayOutputStream;
-
-import org.tmatesoft.hg.core.Nodeid;
-
-/**
- * Create binary manifest entry ready to write down into 00manifest.i
- * <pre>
- * Usage:
- *   ManifestEntryBuilder mb = new ManifestEntryBuilder();
- *   mb.reset().add("file1", file1.getRevision(r1));
- *   mb.add("file2", file2.getRevision(r2));
- *   byte[] manifestRecordData = mb.build();
- *   byte[] manifestRevlogHeader = buildRevlogHeader(..., sha1(parents, manifestRecordData), manifestRecordData.length);
- *   manifestIndexOutputStream.write(manifestRevlogHeader);
- *   manifestIndexOutputStream.write(manifestRecordData);
- * </pre>
- *
- * @author Artem Tikhomirov
- * @author TMate Software Ltd.
- */
-public class ManifestEntryBuilder {
-	private ByteArrayOutputStream buffer = new ByteArrayOutputStream();
-
-
-	public ManifestEntryBuilder reset() {
-		buffer.reset();
-		return this;
-	}
-	public ManifestEntryBuilder add(String fname, Nodeid revision) {
-		byte[] b = fname.getBytes();
-		buffer.write(b, 0, b.length);
-		buffer.write('\0');
-		b = revision.toString().getBytes();
-		buffer.write(b, 0, b.length);
-		buffer.write('\n');
-		return this;
-	}
-
-	public byte[] build() {
-		return buffer.toByteArray();
-	}
-
-}
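
A minimal usage sketch of the CommitFacility API added in this changeset, modeled on TestCommit.testCommitToNonEmpty above; it is not part of the patch, and the repository location (args[0]), the file name "file1", the parent revision index 0 and the commit message are assumptions for illustration only.

import java.io.File;
import java.nio.ByteBuffer;

import org.tmatesoft.hg.core.Nodeid;
import org.tmatesoft.hg.repo.CommitFacility;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRepository;

public class CommitFacilityExample {
	public static void main(String[] args) throws Exception {
		// open an existing repository that already has at least one changeset,
		// so that revision index 0 is a valid parent for the new commit
		HgRepository hgRepo = new HgLookup().detect(new File(args[0]));
		CommitFacility cf = new CommitFacility(hgRepo, 0);
		final byte[] newContent = "hello\nworld".getBytes();
		// ByteDataSupplier hands the new file content to the facility buffer by buffer
		cf.add(hgRepo.getFileNode("file1"), new CommitFacility.ByteDataSupplier() {
			private int pos = 0;
			public int read(ByteBuffer buf) {
				if (pos >= newContent.length) {
					return -1; // nothing left to supply
				}
				int count = Math.min(buf.remaining(), newContent.length - pos);
				buf.put(newContent, pos, count);
				pos += count;
				return count;
			}
		});
		// writes the file, manifest and changelog revlog entries and returns the new changeset id
		Nodeid commitRev = cf.commit("sample commit");
		System.out.println("created changeset: " + commitRev);
	}
}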