hg4j changeset 539:9edfd5a223b8
Commit: handle empty repository case
author    Artem Tikhomirov <tikhomirov.artem@gmail.com>
date      Wed, 13 Feb 2013 18:44:58 +0100
parents   dd4f6311af52
children  67d4b0f73984
files     src/org/tmatesoft/hg/core/HgCloneCommand.java
          src/org/tmatesoft/hg/internal/FNCacheFile.java
          src/org/tmatesoft/hg/internal/Internals.java
          src/org/tmatesoft/hg/internal/LineReader.java
          src/org/tmatesoft/hg/internal/RevlogStream.java
          src/org/tmatesoft/hg/internal/RevlogStreamWriter.java
          src/org/tmatesoft/hg/repo/CommitFacility.java
          src/org/tmatesoft/hg/repo/HgRepository.java
          test/org/tmatesoft/hg/test/TestCommit.java
diffstat  9 files changed, 267 insertions(+), 47 deletions(-)
--- a/src/org/tmatesoft/hg/core/HgCloneCommand.java	Tue Feb 05 22:30:21 2013 +0100
+++ b/src/org/tmatesoft/hg/core/HgCloneCommand.java	Wed Feb 13 18:44:58 2013 +0100
@@ -26,7 +26,6 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.LinkedList;
 import java.util.TreeMap;
 
 import org.tmatesoft.hg.internal.ByteArrayDataAccess;
@@ -34,6 +33,8 @@
 import org.tmatesoft.hg.internal.DataAccessProvider;
 import org.tmatesoft.hg.internal.DataSerializer;
 import org.tmatesoft.hg.internal.DigestHelper;
+import org.tmatesoft.hg.internal.FNCacheFile;
+import org.tmatesoft.hg.internal.Internals;
 import org.tmatesoft.hg.internal.Lifecycle;
 import org.tmatesoft.hg.internal.RepoInitializer;
 import org.tmatesoft.hg.internal.RevlogCompressor;
@@ -49,6 +50,7 @@
 import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.CancelSupport;
 import org.tmatesoft.hg.util.CancelledException;
+import org.tmatesoft.hg.util.Path;
 import org.tmatesoft.hg.util.PathRewrite;
 import org.tmatesoft.hg.util.ProgressSupport;
 
@@ -155,7 +157,7 @@
 		// recently processed nodes last, so that index in the array may be used as a linkRevision or baseRevision
 		private final ArrayList<Nodeid> revisionSequence = new ArrayList<Nodeid>();
-		private final LinkedList<String> fncacheFiles = new LinkedList<String>();
+		private FNCacheFile fncacheFile;
 		private RepoInitializer repoInit;
 		private Lifecycle.Callback lifecycleCallback;
 		private CancelledException cancelException;
 
@@ -176,15 +178,19 @@
 
 		public void initEmptyRepository() throws IOException {
 			repoInit.initEmptyRepository(hgDir);
+			try {
+				assert (repoInit.getRequires() & FNCACHE) != 0;
+				fncacheFile = new FNCacheFile(Internals.getInstance(new HgLookup(ctx).detect(hgDir)));
+			} catch (HgRepositoryNotFoundException ex) {
+				// SHALL NOT HAPPEN provided we initialized empty repository successfully
+				// TODO perhaps, with WriteDownMate moving to a more appropriate location,
+				// we could instantiate HgRepository (or Internals) by other means, without exception?
+				throw new IOException("Can't access fncache for newly created repository", ex);
+			}
 		}
 
 		public void complete() throws IOException {
-			FileOutputStream fncacheFile = new FileOutputStream(new File(hgDir, "store/fncache"));
-			for (String s : fncacheFiles) {
-				fncacheFile.write(s.getBytes());
-				fncacheFile.write(0x0A); // http://mercurial.selenic.com/wiki/fncacheRepoFormat
-			}
-			fncacheFile.close();
+			fncacheFile.write();
 		}
 
 		public void changelogStart() {
@@ -237,8 +243,7 @@
 			try {
 				revlogHeader.offset(0).baseRevision(-1);
 				revisionSequence.clear();
-				fncacheFiles.add("data/" + name + ".i"); // TODO post-1.0 this is pure guess,
-				// need to investigate more how filenames are kept in fncache
+				fncacheFile.add(Path.create(name));
 				File file = new File(hgDir, filename = storagePathHelper.rewrite(name).toString());
 				file.getParentFile().mkdirs();
 				indexFile = new FileOutputStream(file);
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/FNCacheFile.java	Wed Feb 13 18:44:58 2013 +0100
@@ -0,0 +1,82 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+
+import org.tmatesoft.hg.util.Path;
+
+/**
+ * <blockquote>
+ * The fncache file contains the paths of all filelog files in the store as encoded by mercurial.filelog.encodedir. The paths are separated by '\n' (LF).
+ * </blockquote>
+ * @see http://mercurial.selenic.com/wiki/fncacheRepoFormat
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class FNCacheFile {
+
+	private final Internals repo;
+	private final ArrayList<Path> files;
+
+	public FNCacheFile(Internals internalRepo) {
+		repo = internalRepo;
+		files = new ArrayList<Path>();
+	}
+
+	public void read(Path.Source pathFactory) throws IOException {
+		File f = fncacheFile();
+		files.clear();
+		if (!f.exists()) {
+			return;
+		}
+		ArrayList<String> entries = new ArrayList<String>();
+		// names in fncache are in local encoding, shall translate to unicode
+		new LineReader(f, repo.getSessionContext().getLog(), repo.getFilenameEncoding()).read(new LineReader.SimpleLineCollector(), entries);
+		for (String e : entries) {
+			files.add(pathFactory.path(e));
+		}
+	}
+
+	public void write() throws IOException {
+		if (files.isEmpty()) {
+			return;
+		}
+		File f = fncacheFile();
+		f.getParentFile().mkdirs();
+		final Charset filenameEncoding = repo.getFilenameEncoding();
+		FileOutputStream fncacheFile = new FileOutputStream(f);
+		for (Path p : files) {
+			String s = "data/" + p.toString() + ".i"; // TODO post-1.0 this is plain wrong. (a) likely need .d files, too; (b) what about dh/ location?
+			fncacheFile.write(s.getBytes(filenameEncoding));
+			fncacheFile.write(0x0A); // http://mercurial.selenic.com/wiki/fncacheRepoFormat
+		}
+		fncacheFile.close();
+	}
+
+	public void add(Path p) {
+		files.add(p);
+	}
+
+	private File fncacheFile() {
+		return repo.getFileFromStoreDir("fncache");
+	}
+}
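
FNCacheFile introduced above is driven the same way from both the clone path (HgCloneCommand) and the commit path (CommitFacility, further down). A minimal usage sketch, assuming an Internals handle for an already-detected repository; the method name and the implRepo variable are illustrative, not part of the changeset:

	// Sketch: maintain .hg/store/fncache around a write operation (types taken from this changeset).
	void recordNewFilelog(Internals implRepo, Path newFile) throws IOException {
		FNCacheFile fncache = new FNCacheFile(implRepo);
		fncache.read(new Path.SimpleSource()); // loads existing entries; a missing file is tolerated
		fncache.add(newFile);                  // register the filelog that is about to be created
		fncache.write();                       // rewrites the file, one "data/<path>.i" entry per line (LF-separated)
	}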
--- a/src/org/tmatesoft/hg/internal/Internals.java	Tue Feb 05 22:30:21 2013 +0100
+++ b/src/org/tmatesoft/hg/internal/Internals.java	Wed Feb 13 18:44:58 2013 +0100
@@ -98,7 +98,6 @@
 	private final boolean shallCacheRevlogsInRepo;
 	private final DataAccessProvider dataAccess;
 
-	@SuppressWarnings("unused")
 	private final int requiresFlags;
 
 	private final PathRewrite dataPathHelper; // access to file storage area (usually under .hg/store/data/), with filenames mangled
@@ -200,8 +199,11 @@
 	}
 
 	public EncodingHelper buildFileNameEncodingHelper() {
-		SessionContext ctx = repo.getSessionContext();
-		return new EncodingHelper(getFileEncoding(ctx), ctx);
+		return new EncodingHelper(getFilenameEncoding(), repo.getSessionContext());
+	}
+
+	/*package-local*/ Charset getFilenameEncoding() {
+		return getFileEncoding(getSessionContext());
 	}
 
 	/*package-local*/ static Charset getFileEncoding(SessionContext ctx) {
@@ -230,6 +232,9 @@
 		return dataPathHelper.rewrite(df.getPath().toString());
 	}
 
+	public int getRequiresFlags() {
+		return requiresFlags;
+	}
 
 	public static boolean runningOnWindows() {
 		return System.getProperty("os.name").indexOf("Windows") != -1;
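
The newly exposed getRequiresFlags() lets callers check whether the repository format includes fncache at all before touching it; CommitFacility below does exactly that. A condensed, hypothetical fragment built from the code in this changeset (implRepo stands for an Internals instance):

	// RequiresFile.FNCACHE is the requirements bit for the fncache repository format.
	if ((implRepo.getRequiresFlags() & RequiresFile.FNCACHE) != 0) {
		FNCacheFile fncache = new FNCacheFile(implRepo);
		// ... read existing entries, add newly created filelogs, write back
	}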
--- a/src/org/tmatesoft/hg/internal/LineReader.java	Tue Feb 05 22:30:21 2013 +0100
+++ b/src/org/tmatesoft/hg/internal/LineReader.java	Wed Feb 13 18:44:58 2013 +0100
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2012 TMate Software Ltd
+ * Copyright (c) 2012-2013 TMate Software Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
@@ -20,8 +20,12 @@
 
 import java.io.BufferedReader;
 import java.io.File;
+import java.io.FileInputStream;
 import java.io.FileReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.nio.charset.Charset;
 import java.util.Collection;
 
 import org.tmatesoft.hg.repo.HgInvalidFileException;
@@ -52,13 +56,19 @@
 
	private final File file;
	private final LogFacility log;
+	private final Charset encoding;
	private boolean trimLines = true;
	private boolean skipEmpty = true;
	private String ignoreThatStarts = null;
 
	public LineReader(File f, LogFacility logFacility) {
+		this(f, logFacility, null);
+	}
+
+	public LineReader(File f, LogFacility logFacility, Charset lineEncoding) {
		file = f;
		log = logFacility;
+		encoding = lineEncoding;
	}
 
	/**
@@ -92,7 +102,13 @@
		BufferedReader statusFileReader = null;
		try {
//			consumer.begin(file, paramObj);
-			statusFileReader = new BufferedReader(new FileReader(file));
+			Reader fileReader;
+			if (encoding == null) {
+				fileReader = new FileReader(file);
+			} else {
+				fileReader = new InputStreamReader(new FileInputStream(file), encoding);
+			}
+			statusFileReader = new BufferedReader(fileReader);
			String line;
			boolean ok = true;
			while (ok && (line = statusFileReader.readLine()) != null) {
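
The extra Charset argument lets LineReader decode files written in the repository's local filename encoding; FNCacheFile.read() above is its first client. A small hedged fragment (fncacheFile, logFacility and filenameEncoding are placeholders for values a caller already has):

	// Read .hg/store/fncache line by line with the repository's filename encoding;
	// SimpleLineCollector and the three-argument constructor come from this changeset.
	ArrayList<String> entries = new ArrayList<String>();
	new LineReader(fncacheFile, logFacility, filenameEncoding).read(new LineReader.SimpleLineCollector(), entries);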
--- a/src/org/tmatesoft/hg/internal/RevlogStream.java	Tue Feb 05 22:30:21 2013 +0100
+++ b/src/org/tmatesoft/hg/internal/RevlogStream.java	Wed Feb 13 18:44:58 2013 +0100
@@ -224,6 +224,10 @@
 		return BAD_REVISION;
 	}
 
+	/**
+	 * @return value suitable for the corresponding field in the new revision's header, not physical offset in the file
+	 * (which is different in case of inline revlogs)
+	 */
 	public long newEntryOffset() {
 		if (revisionCount() == 0) {
 			return 0;
@@ -324,6 +328,31 @@
 		}
 	}
 
+	void revisionAdded(int revisionIndex, Nodeid revision, int baseRevisionIndex, long revisionOffset) throws HgInvalidControlFileException {
+		if (!outlineCached()) {
+			return;
+		}
+		if (baseRevisions.length != revisionIndex) {
+			throw new HgInvalidControlFileException(String.format("New entry's index shall be %d, not %d", baseRevisions.length, revisionIndex), null, indexFile);
+		}
+		if (baseRevisionIndex < 0 || baseRevisionIndex > baseRevisions.length) {
+			// baseRevisionIndex MAY be == to baseRevisions.length, it's when new revision is based on itself
+			throw new HgInvalidControlFileException(String.format("Base revision index %d doesn't fit [0..%d] range", baseRevisionIndex, baseRevisions.length), null, indexFile);
+		}
+		assert revision != null;
+		assert !revision.isNull();
+		int[] baseRevisionsCopy = new int[baseRevisions.length + 1];
+		System.arraycopy(baseRevisions, 0, baseRevisionsCopy, 0, baseRevisions.length);
+		baseRevisionsCopy[baseRevisions.length] = baseRevisionIndex;
+		baseRevisions = baseRevisionsCopy;
+		if (inline && indexRecordOffset != null) {
+			assert indexRecordOffset.length == revisionIndex;
+			int[] indexRecordOffsetCopy = new int[indexRecordOffset.length + 1];
+			indexRecordOffsetCopy[indexRecordOffset.length] = offsetFieldToInlineFileOffset(revisionOffset, revisionIndex);
+			indexRecordOffset = indexRecordOffsetCopy;
+		}
+	}
+
 	private int getBaseRevision(int revision) {
 		return baseRevisions[revision];
 	}
@@ -347,9 +376,25 @@
 		}
 		return revisionIndex;
 	}
+
+	private boolean outlineCached() {
+		return baseRevisions != null && baseRevisions.length > 0;
+	}
+
+	// translate 6-byte offset field value to pysical file offset for inline revlogs
+	// DOESN'T MAKE SENSE if revlog with data is separate
+	private static int offsetFieldToInlineFileOffset(long offset, int recordIndex) throws HgInvalidStateException {
+		int o = Internals.ltoi(offset);
+		if (o != offset) {
+			// just in case, can't happen, ever, unless HG (or some other bad tool) produces index file
+			// with inlined data of size greater than 2 Gb.
+			throw new HgInvalidStateException("Data too big, offset didn't fit to sizeof(int)");
+		}
+		return o + REVLOGV1_RECORD_SIZE * recordIndex;
+	}
 
 	private void initOutline() throws HgInvalidControlFileException {
-		if (baseRevisions != null && baseRevisions.length > 0) {
+		if (outlineCached()) {
 			return;
 		}
 		DataAccess da = getIndexStream();
@@ -357,6 +402,8 @@
 		if (da.isEmpty()) {
 			// do not fail with exception if stream is empty, it's likely intentional
 			baseRevisions = new int[0];
+			// empty revlog, likely to be populated, indicate we start with a single file
+			inline = true;
 			return;
 		}
 		int versionField = da.readInt();
@@ -385,13 +432,8 @@
//				byte[] nodeid = new byte[32];
 				resBases.add(baseRevision);
 				if (inline) {
-					int o = Internals.ltoi(offset);
-					if (o != offset) {
-						// just in case, can't happen, ever, unless HG (or some other bad tool) produces index file
-						// with inlined data of size greater than 2 Gb.
-						throw new HgInvalidStateException("Data too big, offset didn't fit to sizeof(int)");
-					}
-					resOffsets.add(o + REVLOGV1_RECORD_SIZE * resOffsets.size());
+					int o = offsetFieldToInlineFileOffset(offset, resOffsets.size());
+					resOffsets.add(o);
 					da.skip(3*4 + 32 + compressedLen); // Check: 44 (skip) + 20 (read) = 64 (total RevlogNG record size)
 				} else {
 					da.skip(3*4 + 32);
@@ -611,4 +653,5 @@
 		// implementers shall not invoke DataAccess.done(), it's accomplished by #iterate at appropraite moment
 		void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[/*20*/] nodeid, DataAccess data);
 	}
+
 }
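
offsetFieldToInlineFileOffset() factors out the translation initOutline() used to do in place: for an inline revlog, where data is interleaved with the index, a record's physical position is the cumulative compressed length kept in the 6-byte offset field plus one 64-byte index header per preceding record (64 is the RevlogNG record size noted in the comment inside initOutline). A standalone illustration of the same arithmetic; this is a hypothetical copy for clarity, not the package-private original:

	static int inlineFileOffset(long offsetField, int recordIndex) {
		final int REVLOGV1_RECORD_SIZE = 64; // size of one RevlogNG index record
		int o = (int) offsetField;
		if (o != offsetField) {
			// mirrors the guard in the changeset: inline data beyond 2 GB is not expected
			throw new IllegalStateException("Data too big, offset didn't fit to sizeof(int)");
		}
		return o + REVLOGV1_RECORD_SIZE * recordIndex;
	}
	// e.g. the third record (index 2) with an offset field of 150 starts at 150 + 64*2 = 278 in the .i file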
--- a/src/org/tmatesoft/hg/internal/RevlogStreamWriter.java	Tue Feb 05 22:30:21 2013 +0100
+++ b/src/org/tmatesoft/hg/internal/RevlogStreamWriter.java	Wed Feb 13 18:44:58 2013 +0100
@@ -94,7 +94,8 @@
 		revlogHeader.linkRevision(linkRevision);
 		revlogHeader.parents(p1, p2);
 		revlogHeader.baseRevision(writeComplete ? lastEntryIndex+1 : lastEntryBase);
-		revlogHeader.offset(revlogStream.newEntryOffset());
+		long lastEntryOffset = revlogStream.newEntryOffset();
+		revlogHeader.offset(lastEntryOffset);
 		//
 		revlogHeader.serialize(indexFile);
 
@@ -114,11 +115,14 @@
 			dataSource.serialize(dataFile);
 		}
 
+		lastEntryContent = content;
 		lastEntryBase = revlogHeader.baseRevision();
 		lastEntryIndex++;
 		lastEntryRevision = Nodeid.fromBinary(revisionNodeidBytes, 0);
 		revisionCache.put(lastEntryIndex, lastEntryRevision);
+
+		revlogStream.revisionAdded(lastEntryIndex, lastEntryRevision, lastEntryBase, lastEntryOffset);
 	} catch (IOException ex) {
 		String m = String.format("Failed to write revision %d", lastEntryIndex+1, null);
 		HgInvalidControlFileException t = new HgInvalidControlFileException(m, ex, null);
@@ -149,7 +153,10 @@
 	}
 
 	private void populateLastEntry() throws HgInvalidControlFileException {
-		if (lastEntryIndex != NO_REVISION && lastEntryContent == null) {
+		if (lastEntryContent != null) {
+			return;
+		}
+		if (lastEntryIndex != NO_REVISION) {
 			assert lastEntryIndex >= 0;
 			final IOException[] failure = new IOException[1];
 			revlogStream.iterate(lastEntryIndex, lastEntryIndex, true, new RevlogStream.Inspector() {
@@ -168,6 +175,8 @@
 				String m = String.format("Failed to get content of most recent revision %d", lastEntryIndex);
 				throw revlogStream.initWithDataFile(new HgInvalidControlFileException(m, failure[0], null));
 			}
+		} else {
+			lastEntryContent = new byte[0];
 		}
 	}
 
@@ -267,7 +276,7 @@
 
 			// regardless whether it's inline or separate data,
 			// offset field always represent cumulative compressedLength
-			// (while offset in the index file with inline==true differs by n*sizeof(header), where n is entry's position in the file)
+			// (while physical offset in the index file with inline==true differs by n*sizeof(header), where n is entry's position in the file)
 			offset += compressedLength;
 		}
--- a/src/org/tmatesoft/hg/repo/CommitFacility.java	Tue Feb 05 22:30:21 2013 +0100
+++ b/src/org/tmatesoft/hg/repo/CommitFacility.java	Wed Feb 13 18:44:58 2013 +0100
@@ -18,7 +18,9 @@
 
 import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
 
+import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.Map;
@@ -29,11 +31,15 @@
 import org.tmatesoft.hg.internal.ByteArrayChannel;
 import org.tmatesoft.hg.internal.ChangelogEntryBuilder;
 import org.tmatesoft.hg.internal.Experimental;
+import org.tmatesoft.hg.internal.FNCacheFile;
 import org.tmatesoft.hg.internal.ManifestEntryBuilder;
 import org.tmatesoft.hg.internal.ManifestRevision;
+import org.tmatesoft.hg.internal.RequiresFile;
+import org.tmatesoft.hg.internal.RevlogStream;
 import org.tmatesoft.hg.internal.RevlogStreamWriter;
 import org.tmatesoft.hg.util.Pair;
 import org.tmatesoft.hg.util.Path;
+import org.tmatesoft.hg.util.LogFacility.Severity;
 
 /**
  * WORK IN PROGRESS
@@ -81,6 +87,16 @@
 		if (p2Commit != NO_REVISION) {
 			repo.getManifest().walk(p2Commit, p2Commit, c2Manifest);
 		}
+		FNCacheFile fncache = null;
+		if ((repo.getImplHelper().getRequiresFlags() & RequiresFile.FNCACHE) != 0) {
+			fncache = new FNCacheFile(repo.getImplHelper());
+			try {
+				fncache.read(new Path.SimpleSource());
+			} catch (IOException ex) {
+				// fncache may be restored using native client, so do not treat failure to read it as severe enough to stop
+				repo.getSessionContext().getLog().dump(getClass(), Severity.Error, ex, "Failed to read fncache, attempt commit nevertheless");
+			}
+		}
 		// Pair<Integer, Integer> manifestParents = getManifestParents();
 		Pair<Integer, Integer> manifestParents = new Pair<Integer, Integer>(c1Manifest.revisionIndex(), c2Manifest.revisionIndex());
 		TreeMap<Path, Nodeid> newManifestRevision = new TreeMap<Path, Nodeid>();
@@ -88,13 +104,21 @@
 		for (Path f : c1Manifest.files()) {
 			HgDataFile df = repo.getFileNode(f);
 			Nodeid fileKnownRev = c1Manifest.nodeid(f);
-			int fileRevIndex = df.getRevisionIndex(fileKnownRev);
-			// FIXME merged files?!
-			fileParents.put(f, new Pair<Integer, Integer>(fileRevIndex, NO_REVISION));
+			final int fileRevIndex1 = df.getRevisionIndex(fileKnownRev);
+			final int fileRevIndex2;
+			if ((fileKnownRev = c2Manifest.nodeid(f)) != null) {
+				// merged files
+				fileRevIndex2 = df.getRevisionIndex(fileKnownRev);
+			} else {
+				fileRevIndex2 = NO_REVISION;
+			}
+
+			fileParents.put(f, new Pair<Integer, Integer>(fileRevIndex1, fileRevIndex2));
 			newManifestRevision.put(f, fileKnownRev);
 		}
 		//
 		// Files
+		ArrayList<Path> newlyAddedFiles = new ArrayList<Path>();
 		for (Pair<HgDataFile, ByteDataSupplier> e : files.values()) {
 			HgDataFile df = e.first();
 			Pair<Integer, Integer> fp = fileParents.get(df.getPath());
@@ -111,7 +135,14 @@
 				bac.write(bb);
 				bb.clear();
 			}
-			RevlogStreamWriter fileWriter = new RevlogStreamWriter(repo.getSessionContext(), df.content);
+			RevlogStream contentStream;
+			if (df.exists()) {
+				contentStream = df.content;
+			} else {
+				contentStream = repo.createStoreFile(df.getPath());
+				newlyAddedFiles.add(df.getPath());
+			}
+			RevlogStreamWriter fileWriter = new RevlogStreamWriter(repo.getSessionContext(), contentStream);
 			Nodeid fileRev = fileWriter.addRevision(bac.toArray(), clogRevisionIndex, fp.first(), fp.second());
 			newManifestRevision.put(df.getPath(), fileRev);
 		}
@@ -130,6 +161,17 @@
 		byte[] clogContent = changelogBuilder.build(manifestRev, message);
 		RevlogStreamWriter changelogWriter = new RevlogStreamWriter(repo.getSessionContext(), clog.content);
 		Nodeid changesetRev = changelogWriter.addRevision(clogContent, clogRevisionIndex, p1Commit, p2Commit);
+		if (!newlyAddedFiles.isEmpty() && fncache != null) {
+			for (Path p : newlyAddedFiles) {
+				fncache.add(p);
+			}
+			try {
+				fncache.write();
+			} catch (IOException ex) {
+				// see comment above for fnchache.read()
+				repo.getSessionContext().getLog().dump(getClass(), Severity.Error, ex, "Failed to write fncache, error ignored");
+			}
+		}
 		return changesetRev;
 	}
 	/*
--- a/src/org/tmatesoft/hg/repo/HgRepository.java	Tue Feb 05 22:30:21 2013 +0100
+++ b/src/org/tmatesoft/hg/repo/HgRepository.java	Wed Feb 13 18:44:58 2013 +0100
@@ -184,10 +184,6 @@
 	public HgChangelog getChangelog() {
 		if (changelog == null) {
 			File chlogFile = impl.getFileFromStoreDir("00changelog.i");
-			if (!chlogFile.exists()) {
-				// fake its existence
-				chlogFile = fakeNonExistentFile(chlogFile);
-			}
 			RevlogStream content = new RevlogStream(impl.getDataAccess(), chlogFile);
 			changelog = new HgChangelog(this, content);
 		}
@@ -197,9 +193,6 @@
 	public HgManifest getManifest() {
 		if (manifest == null) {
 			File manifestFile = impl.getFileFromStoreDir("00manifest.i");
-			if (!manifestFile.exists()) {
-				manifestFile = fakeNonExistentFile(manifestFile);
-			}
 			RevlogStream content = new RevlogStream(impl.getDataAccess(), manifestFile);
 			manifest = new HgManifest(this, content, impl.buildFileNameEncodingHelper());
 		}
@@ -512,14 +505,20 @@
 		return null;
 	}
 
-	private File fakeNonExistentFile(File expected) throws HgInvalidFileException {
+	/*package-local*/ RevlogStream createStoreFile(Path path) throws HgInvalidControlFileException {
+		File f = impl.getFileFromDataDir(path);
 		try {
-			File fake = File.createTempFile(expected.getName(), null);
-			fake.deleteOnExit();
-			return fake;
+			if (!f.exists()) {
+				f.getParentFile().mkdirs();
+				f.createNewFile();
+			}
+			RevlogStream s = new RevlogStream(impl.getDataAccess(), f);
+			if (impl.shallCacheRevlogs()) {
+				streamsCache.put(path, new SoftReference<RevlogStream>(s));
+			}
+			return s;
 		} catch (IOException ex) {
-			getSessionContext().getLog().dump(getClass(), Info, ex, null);
-			throw new HgInvalidFileException(String.format("Failed to fake existence of file %s", expected), ex);
+			throw new HgInvalidControlFileException("Can't create a file in the storage", ex, f);
 		}
 	}
--- a/test/org/tmatesoft/hg/test/TestCommit.java	Tue Feb 05 22:30:21 2013 +0100
+++ b/test/org/tmatesoft/hg/test/TestCommit.java	Wed Feb 13 18:44:58 2013 +0100
@@ -16,6 +16,8 @@
  */
 package org.tmatesoft.hg.test;
 
+import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
+
 import java.io.File;
 import java.io.FileWriter;
 import java.nio.ByteBuffer;
@@ -41,15 +43,32 @@
 		new ExecHelper(new OutputParser.Stub(true), repoLoc).run("hg", "commit", "--addremove", "-m", "FIRST");
 		//
 		HgRepository hgRepo = new HgLookup().detect(repoLoc);
-		CommitFacility cf = new CommitFacility(hgRepo, 0 /*NO_REVISION*/);
+		CommitFacility cf = new CommitFacility(hgRepo, 0);
+		// FIXME test diff for processing changed newlines - if a whole line or just changed endings are in the patch!
+		cf.add(hgRepo.getFileNode("file1"), new ByteArraySupplier("hello\nworld".getBytes()));
+		cf.commit("SECOND");
+		// /tmp/test-commit2non-empty/.hg/ store/data/file1.i dumpData
+	}
+
+	@Test
+	public void testCommitToEmpty() throws Exception {
+		File repoLoc = RepoUtils.initEmptyTempRepo("test-commit2empty");
+		FileWriter fw = new FileWriter(new File(repoLoc, "file1"));
+		fw.write("hello");
+		fw.close();
+		//
+		HgRepository hgRepo = new HgLookup().detect(repoLoc);
+		CommitFacility cf = new CommitFacility(hgRepo, NO_REVISION);
 		// FIXME test diff for processing changed newlines - if a whole line or just changed endings are in the patch!
 		cf.add(hgRepo.getFileNode("file1"), new ByteArraySupplier("hello\nworld".getBytes()));
 		cf.commit("commit 1");
-		// /tmp/test-commit2non-empty/.hg/ store/data/file1.i dumpData
 	}
-
+
 	public static void main(String[] args) throws Exception {
-		new TestCommit().testCommitToNonEmpty();
+		new TestCommit().testCommitToEmpty();
+		if (Boolean.TRUE.booleanValue()) {
+			return;
+		}
 		String input = "abcdefghijklmnopqrstuvwxyz";
 		ByteArraySupplier bas = new ByteArraySupplier(input.getBytes());
 		ByteBuffer bb = ByteBuffer.allocate(7);
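
With testCommitToEmpty in place, a natural follow-up (not part of this changeset) would be to assert that the commit registered the new filelog in fncache; given the write() format shown above, such a check could look roughly like this, using JUnit's Assert and the repoLoc from the test:

	// Hypothetical assertion: after committing "file1" into the fresh repository,
	// .hg/store/fncache should hold a single LF-terminated entry "data/file1.i".
	File fncache = new File(repoLoc, ".hg/store/fncache");
	BufferedReader r = new BufferedReader(new FileReader(fncache));
	try {
		Assert.assertEquals("data/file1.i", r.readLine());
	} finally {
		r.close();
	}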