tikhomirov@704: /* tikhomirov@704: * Copyright (c) 2013 TMate Software Ltd tikhomirov@704: * tikhomirov@704: * This program is free software; you can redistribute it and/or modify tikhomirov@704: * it under the terms of the GNU General Public License as published by tikhomirov@704: * the Free Software Foundation; version 2 of the License. tikhomirov@704: * tikhomirov@704: * This program is distributed in the hope that it will be useful, tikhomirov@704: * but WITHOUT ANY WARRANTY; without even the implied warranty of tikhomirov@704: * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the tikhomirov@704: * GNU General Public License for more details. tikhomirov@704: * tikhomirov@704: * For information on how to redistribute this software under tikhomirov@704: * the terms of a license other than GNU General Public License tikhomirov@704: * contact TMate Software at support@hg4j.com tikhomirov@704: */ tikhomirov@704: package org.tmatesoft.hg.core; tikhomirov@704: tikhomirov@704: import static org.tmatesoft.hg.repo.HgRepository.BAD_REVISION; tikhomirov@704: tikhomirov@705: import java.io.File; tikhomirov@705: import java.io.FileInputStream; tikhomirov@705: import java.io.IOException; tikhomirov@704: import java.io.InputStream; tikhomirov@704: tikhomirov@704: import org.tmatesoft.hg.internal.Callback; tikhomirov@704: import org.tmatesoft.hg.internal.CsetParamKeeper; tikhomirov@705: import org.tmatesoft.hg.internal.DirstateBuilder; tikhomirov@705: import org.tmatesoft.hg.internal.DirstateReader; tikhomirov@704: import org.tmatesoft.hg.internal.Experimental; tikhomirov@705: import org.tmatesoft.hg.internal.FileUtils; tikhomirov@705: import org.tmatesoft.hg.internal.Internals; tikhomirov@704: import org.tmatesoft.hg.internal.ManifestRevision; tikhomirov@705: import org.tmatesoft.hg.internal.MergeStateBuilder; tikhomirov@704: import org.tmatesoft.hg.internal.Pool; tikhomirov@705: import org.tmatesoft.hg.internal.Transaction; tikhomirov@705: import org.tmatesoft.hg.internal.WorkingDirFileWriter; tikhomirov@704: import org.tmatesoft.hg.repo.HgChangelog; tikhomirov@704: import org.tmatesoft.hg.repo.HgParentChildMap; tikhomirov@704: import org.tmatesoft.hg.repo.HgRepository; tikhomirov@704: import org.tmatesoft.hg.repo.HgRepositoryLock; tikhomirov@704: import org.tmatesoft.hg.repo.HgRevisionMap; tikhomirov@704: import org.tmatesoft.hg.repo.HgRuntimeException; tikhomirov@704: import org.tmatesoft.hg.util.CancelledException; tikhomirov@704: import org.tmatesoft.hg.util.Path; tikhomirov@704: tikhomirov@704: /** tikhomirov@704: * Merge two revisions, 'hg merge REV' counterpart tikhomirov@704: * tikhomirov@704: * @author Artem Tikhomirov tikhomirov@704: * @author TMate Software Ltd. tikhomirov@704: * @since 1.2 tikhomirov@704: */ tikhomirov@704: @Experimental(reason="Provisional API. Work in progress") tikhomirov@704: public class HgMergeCommand extends HgAbstractCommand { tikhomirov@704: tikhomirov@704: private final HgRepository repo; tikhomirov@704: private int firstCset, secondCset, ancestorCset; tikhomirov@704: tikhomirov@704: public HgMergeCommand(HgRepository hgRepo) { tikhomirov@704: repo = hgRepo; tikhomirov@704: firstCset = secondCset = ancestorCset = BAD_REVISION; tikhomirov@704: } tikhomirov@704: tikhomirov@704: public HgMergeCommand changeset(Nodeid changeset) throws HgBadArgumentException { tikhomirov@704: initHeadsAndAncestor(new CsetParamKeeper(repo).set(changeset).get()); tikhomirov@704: return this; tikhomirov@704: } tikhomirov@704: tikhomirov@704: public HgMergeCommand changeset(int revisionIndex) throws HgBadArgumentException { tikhomirov@704: initHeadsAndAncestor(new CsetParamKeeper(repo).set(revisionIndex).get()); tikhomirov@704: return this; tikhomirov@704: } tikhomirov@704: tikhomirov@705: public void execute(Mediator mediator) throws HgCallbackTargetException, HgRepositoryLockException, HgIOException, HgLibraryFailureException, CancelledException { tikhomirov@704: if (firstCset == BAD_REVISION || secondCset == BAD_REVISION || ancestorCset == BAD_REVISION) { tikhomirov@704: throw new IllegalArgumentException("Merge heads and their ancestors are not initialized"); tikhomirov@704: } tikhomirov@704: final HgRepositoryLock wdLock = repo.getWorkingDirLock(); tikhomirov@704: wdLock.acquire(); tikhomirov@704: try { tikhomirov@704: Pool cacheRevs = new Pool(); tikhomirov@704: Pool cacheFiles = new Pool(); tikhomirov@705: tikhomirov@705: Internals implRepo = Internals.getInstance(repo); tikhomirov@705: final DirstateBuilder dirstateBuilder = new DirstateBuilder(implRepo); tikhomirov@705: dirstateBuilder.fillFrom(new DirstateReader(implRepo, new Path.SimpleSource(repo.getSessionContext().getPathFactory(), cacheFiles))); tikhomirov@705: final HgChangelog clog = repo.getChangelog(); tikhomirov@705: dirstateBuilder.parents(clog.getRevision(firstCset), clog.getRevision(secondCset)); tikhomirov@705: // tikhomirov@705: MergeStateBuilder mergeStateBuilder = new MergeStateBuilder(implRepo); tikhomirov@705: tikhomirov@704: ManifestRevision m1, m2, ma; tikhomirov@704: m1 = new ManifestRevision(cacheRevs, cacheFiles).init(repo, firstCset); tikhomirov@704: m2 = new ManifestRevision(cacheRevs, cacheFiles).init(repo, secondCset); tikhomirov@704: ma = new ManifestRevision(cacheRevs, cacheFiles).init(repo, ancestorCset); tikhomirov@705: Transaction transaction = implRepo.getTransactionFactory().create(repo); tikhomirov@705: ResolverImpl resolver = new ResolverImpl(implRepo, dirstateBuilder, mergeStateBuilder); tikhomirov@705: try { tikhomirov@705: for (Path f : m1.files()) { tikhomirov@705: Nodeid fileRevBase, fileRevA, fileRevB; tikhomirov@705: if (m2.contains(f)) { tikhomirov@705: fileRevA = m1.nodeid(f); tikhomirov@705: fileRevB = m2.nodeid(f); tikhomirov@705: fileRevBase = ma.contains(f) ? ma.nodeid(f) : null; tikhomirov@705: if (fileRevA.equals(fileRevB)) { tikhomirov@705: HgFileRevision fr = new HgFileRevision(repo, fileRevA, m1.flags(f), f); tikhomirov@705: resolver.presentState(f, fr, fr); tikhomirov@705: mediator.same(fr, resolver); tikhomirov@705: } else if (fileRevBase == fileRevA) { tikhomirov@705: assert fileRevBase != null; tikhomirov@705: HgFileRevision frBase = new HgFileRevision(repo, fileRevBase, ma.flags(f), f); tikhomirov@705: HgFileRevision frSecond= new HgFileRevision(repo, fileRevB, m2.flags(f), f); tikhomirov@705: resolver.presentState(f, frBase, frSecond); tikhomirov@705: mediator.fastForwardB(frBase, frSecond, resolver); tikhomirov@705: } else if (fileRevBase == fileRevB) { tikhomirov@705: assert fileRevBase != null; tikhomirov@705: HgFileRevision frBase = new HgFileRevision(repo, fileRevBase, ma.flags(f), f); tikhomirov@705: HgFileRevision frFirst = new HgFileRevision(repo, fileRevA, m1.flags(f), f); tikhomirov@705: resolver.presentState(f, frFirst, frBase); tikhomirov@705: mediator.fastForwardA(frBase, frFirst, resolver); tikhomirov@705: } else { tikhomirov@705: HgFileRevision frBase = fileRevBase == null ? null : new HgFileRevision(repo, fileRevBase, ma.flags(f), f); tikhomirov@705: HgFileRevision frFirst = new HgFileRevision(repo, fileRevA, m1.flags(f), f); tikhomirov@705: HgFileRevision frSecond= new HgFileRevision(repo, fileRevB, m2.flags(f), f); tikhomirov@705: resolver.presentState(f, frFirst, frSecond); tikhomirov@705: mediator.resolve(frBase, frFirst, frSecond, resolver); tikhomirov@705: } tikhomirov@704: } else { tikhomirov@705: // m2 doesn't contain the file, either new in m1, or deleted in m2 tikhomirov@705: HgFileRevision frFirst = new HgFileRevision(repo, m1.nodeid(f), m1.flags(f), f); tikhomirov@705: resolver.presentState(f, frFirst, null); tikhomirov@705: if (ma.contains(f)) { tikhomirov@705: // deleted in m2 tikhomirov@705: HgFileRevision frBase = new HgFileRevision(repo, ma.nodeid(f), ma.flags(f), f); tikhomirov@705: mediator.onlyA(frBase, frFirst, resolver); tikhomirov@705: } else { tikhomirov@705: // new in m1 tikhomirov@705: mediator.newInA(frFirst, resolver); tikhomirov@705: } tikhomirov@704: } tikhomirov@705: resolver.apply(); tikhomirov@705: } // for m1 files tikhomirov@705: for (Path f : m2.files()) { tikhomirov@705: if (m1.contains(f)) { tikhomirov@705: continue; tikhomirov@705: } tikhomirov@705: HgFileRevision frSecond= new HgFileRevision(repo, m2.nodeid(f), m2.flags(f), f); tikhomirov@705: // file in m2 is either new or deleted in m1 tikhomirov@705: resolver.presentState(f, null, frSecond); tikhomirov@704: if (ma.contains(f)) { tikhomirov@705: // deleted in m1 tikhomirov@704: HgFileRevision frBase = new HgFileRevision(repo, ma.nodeid(f), ma.flags(f), f); tikhomirov@705: mediator.onlyB(frBase, frSecond, resolver); tikhomirov@704: } else { tikhomirov@705: // new in m2 tikhomirov@705: mediator.newInB(frSecond, resolver); tikhomirov@704: } tikhomirov@705: resolver.apply(); tikhomirov@704: } tikhomirov@705: resolver.serializeChanged(transaction); tikhomirov@705: transaction.commit(); tikhomirov@705: } catch (HgRuntimeException ex) { tikhomirov@705: transaction.rollback(); tikhomirov@705: throw ex; tikhomirov@705: } catch (HgIOException ex) { tikhomirov@705: transaction.rollback(); tikhomirov@705: throw ex; tikhomirov@704: } tikhomirov@704: } catch (HgRuntimeException ex) { tikhomirov@704: throw new HgLibraryFailureException(ex); tikhomirov@704: } finally { tikhomirov@704: wdLock.release(); tikhomirov@704: } tikhomirov@704: } tikhomirov@704: tikhomirov@704: private void initHeadsAndAncestor(int csetIndexB) throws HgBadArgumentException { tikhomirov@704: firstCset = secondCset = ancestorCset = BAD_REVISION; tikhomirov@704: if (csetIndexB == HgRepository.BAD_REVISION) { tikhomirov@704: throw new HgBadArgumentException("Need valid second head for merge", null); tikhomirov@704: } tikhomirov@704: // TODO cache/share parent-child map, e.g. right in HgChangelog?! #getOrCreate tikhomirov@704: HgParentChildMap pmap = new HgParentChildMap(repo.getChangelog()); tikhomirov@704: pmap.init(); tikhomirov@704: final HgRevisionMap rmap = pmap.getRevisionMap(); tikhomirov@704: final Nodeid csetA = repo.getWorkingCopyParents().first(); tikhomirov@704: final Nodeid csetB = rmap.revision(csetIndexB); tikhomirov@704: final Nodeid ancestor = pmap.ancestor(csetA, csetB); tikhomirov@704: assert !ancestor.isNull(); tikhomirov@704: if (ancestor.equals(csetA) || ancestor.equals(csetB)) { tikhomirov@704: throw new HgBadArgumentException(String.format("Revisions %s and %s are on the same line of descent, use update instead of merge", csetA.shortNotation(), csetB.shortNotation()), null); tikhomirov@704: } tikhomirov@704: firstCset = rmap.revisionIndex(csetA); tikhomirov@704: secondCset = csetIndexB; tikhomirov@704: ancestorCset = rmap.revisionIndex(ancestor); tikhomirov@704: } tikhomirov@704: tikhomirov@704: /** tikhomirov@705: * This is the way client code takes part in the merge process. tikhomirov@705: * It's advised to subclass {@link MediatorBase} unless special treatment for regular cases is desired tikhomirov@704: */ tikhomirov@704: @Experimental(reason="Provisional API. Work in progress") tikhomirov@704: @Callback tikhomirov@704: public interface Mediator { tikhomirov@705: /** tikhomirov@705: * file revisions are identical in both heads tikhomirov@705: */ tikhomirov@705: public void same(HgFileRevision rev, Resolver resolver) throws HgCallbackTargetException; tikhomirov@705: /** tikhomirov@705: * file left in first/left/A trunk only, deleted in second/right/B trunk tikhomirov@705: */ tikhomirov@704: public void onlyA(HgFileRevision base, HgFileRevision rev, Resolver resolver) throws HgCallbackTargetException; tikhomirov@705: /** tikhomirov@705: * file left in second/right/B trunk only, deleted in first/left/A trunk tikhomirov@705: */ tikhomirov@704: public void onlyB(HgFileRevision base, HgFileRevision rev, Resolver resolver) throws HgCallbackTargetException; tikhomirov@705: /** tikhomirov@705: * file is missing in ancestor revision and second/right/B trunk, introduced in first/left/A trunk tikhomirov@705: */ tikhomirov@704: public void newInA(HgFileRevision rev, Resolver resolver) throws HgCallbackTargetException; tikhomirov@705: /** tikhomirov@705: * file is missing in ancestor revision and first/left/A trunk, introduced in second/right/B trunk tikhomirov@705: */ tikhomirov@704: public void newInB(HgFileRevision rev, Resolver resolver) throws HgCallbackTargetException; tikhomirov@705: /** tikhomirov@705: * file was changed in first/left/A trunk, unchanged in second/right/B trunk tikhomirov@705: */ tikhomirov@704: public void fastForwardA(HgFileRevision base, HgFileRevision first, Resolver resolver) throws HgCallbackTargetException; tikhomirov@705: /** tikhomirov@705: * file was changed in second/right/B trunk, unchanged in first/left/A trunk tikhomirov@705: */ tikhomirov@704: public void fastForwardB(HgFileRevision base, HgFileRevision second, Resolver resolver) throws HgCallbackTargetException; tikhomirov@705: /** tikhomirov@705: * File changed (or added, if base is null) in both trunks tikhomirov@705: */ tikhomirov@704: public void resolve(HgFileRevision base, HgFileRevision first, HgFileRevision second, Resolver resolver) throws HgCallbackTargetException; tikhomirov@704: } tikhomirov@704: tikhomirov@704: /** tikhomirov@704: * Clients shall not implement this interface. tikhomirov@704: * They use this API from inside {@link Mediator#resolve(HgFileRevision, HgFileRevision, HgFileRevision, Resolver)} tikhomirov@704: */ tikhomirov@704: @Experimental(reason="Provisional API. Work in progress") tikhomirov@704: public interface Resolver { tikhomirov@704: public void use(HgFileRevision rev); tikhomirov@705: /** tikhomirov@705: * Replace current revision with stream content. tikhomirov@705: * Note, callers are not expected to {@link InputStream#close()} this stream. tikhomirov@705: * It will be {@link InputStream#close() closed} at Hg4J's discretion tikhomirov@705: * not necessarily during invocation of this method. IOW, the library may decide to tikhomirov@705: * use this stream not right away, at some point of time later, and streams supplied tikhomirov@705: * shall respect this. tikhomirov@705: * tikhomirov@705: * @param content New content to replace current revision, shall not be null tikhomirov@705: * @throws IOException propagated exceptions from content tikhomirov@705: */ tikhomirov@705: public void use(InputStream content) throws IOException; tikhomirov@705: public void forget(HgFileRevision rev); tikhomirov@704: public void unresolved(); // record the file for later processing by 'hg resolve' tikhomirov@704: } tikhomirov@704: tikhomirov@705: /** tikhomirov@705: * Base mediator implementation, with regular resolution tikhomirov@705: */ tikhomirov@705: @Experimental(reason="Provisional API. Work in progress") tikhomirov@705: public abstract class MediatorBase implements Mediator { tikhomirov@705: public void same(HgFileRevision rev, Resolver resolver) throws HgCallbackTargetException { tikhomirov@705: resolver.use(rev); tikhomirov@705: } tikhomirov@705: public void onlyA(HgFileRevision base, HgFileRevision rev, Resolver resolver) throws HgCallbackTargetException { tikhomirov@705: resolver.use(rev); tikhomirov@705: } tikhomirov@705: public void onlyB(HgFileRevision base, HgFileRevision rev, Resolver resolver) throws HgCallbackTargetException { tikhomirov@705: resolver.use(rev); tikhomirov@705: } tikhomirov@705: public void newInA(HgFileRevision rev, Resolver resolver) throws HgCallbackTargetException { tikhomirov@705: resolver.use(rev); tikhomirov@705: } tikhomirov@705: public void newInB(HgFileRevision rev, Resolver resolver) throws HgCallbackTargetException { tikhomirov@705: resolver.use(rev); tikhomirov@705: } tikhomirov@705: public void fastForwardA(HgFileRevision base, HgFileRevision first, Resolver resolver) throws HgCallbackTargetException { tikhomirov@705: resolver.use(first); tikhomirov@705: } tikhomirov@705: public void fastForwardB(HgFileRevision base, HgFileRevision second, Resolver resolver) throws HgCallbackTargetException { tikhomirov@705: resolver.use(second); tikhomirov@705: } tikhomirov@705: } tikhomirov@705: tikhomirov@704: private static class ResolverImpl implements Resolver { tikhomirov@705: tikhomirov@705: private final Internals repo; tikhomirov@705: private final DirstateBuilder dirstateBuilder; tikhomirov@705: private final MergeStateBuilder mergeStateBuilder; tikhomirov@705: private boolean changedDirstate; tikhomirov@705: private HgFileRevision revA; tikhomirov@705: private HgFileRevision revB; tikhomirov@705: private Path file; tikhomirov@705: // resolutions: tikhomirov@705: private HgFileRevision resolveUse, resolveForget; tikhomirov@705: private File resolveContent; tikhomirov@705: private boolean resolveMarkUnresolved; tikhomirov@705: tikhomirov@705: public ResolverImpl(Internals implRepo, DirstateBuilder dirstateBuilder, MergeStateBuilder mergeStateBuilder) { tikhomirov@705: repo = implRepo; tikhomirov@705: this.dirstateBuilder = dirstateBuilder; tikhomirov@705: this.mergeStateBuilder = mergeStateBuilder; tikhomirov@705: changedDirstate = false; tikhomirov@705: } tikhomirov@705: tikhomirov@705: void serializeChanged(Transaction tr) throws HgIOException { tikhomirov@705: if (changedDirstate) { tikhomirov@705: dirstateBuilder.serialize(tr); tikhomirov@705: } tikhomirov@705: mergeStateBuilder.serialize(tr); tikhomirov@705: } tikhomirov@705: tikhomirov@705: void presentState(Path p, HgFileRevision revA, HgFileRevision revB) { tikhomirov@705: assert revA != null || revB != null; tikhomirov@705: file = p; tikhomirov@705: this.revA = revA; tikhomirov@705: this.revB = revB; tikhomirov@705: resolveUse = resolveForget = null; tikhomirov@705: resolveContent = null; tikhomirov@705: resolveMarkUnresolved = false; tikhomirov@705: } tikhomirov@705: tikhomirov@705: void apply() throws HgIOException, HgRuntimeException { tikhomirov@705: if (resolveMarkUnresolved) { tikhomirov@705: mergeStateBuilder.unresolved(file); tikhomirov@705: } else if (resolveForget != null) { tikhomirov@705: if (resolveForget == revA) { tikhomirov@705: changedDirstate = true; tikhomirov@705: dirstateBuilder.recordRemoved(file); tikhomirov@705: } tikhomirov@705: } else if (resolveUse != null) { tikhomirov@705: if (resolveUse != revA) { tikhomirov@705: changedDirstate = true; tikhomirov@705: final WorkingDirFileWriter fw = new WorkingDirFileWriter(repo); tikhomirov@705: fw.processFile(resolveUse); tikhomirov@705: if (resolveUse == revB) { tikhomirov@705: dirstateBuilder.recordMergedFromP2(file); tikhomirov@705: } else { tikhomirov@705: dirstateBuilder.recordMerged(file, fw.fmode(), fw.mtime(), fw.bytesWritten()); tikhomirov@705: } tikhomirov@705: } // if resolution is to use revA, nothing to do tikhomirov@705: } else if (resolveContent != null) { tikhomirov@705: changedDirstate = true; tikhomirov@705: // FIXME write content to file using transaction? tikhomirov@705: InputStream is; tikhomirov@705: try { tikhomirov@705: is = new FileInputStream(resolveContent); tikhomirov@705: } catch (IOException ex) { tikhomirov@705: throw new HgIOException("Failed to read temporary content", ex, resolveContent); tikhomirov@705: } tikhomirov@705: final WorkingDirFileWriter fw = new WorkingDirFileWriter(repo); tikhomirov@705: fw.processFile(file, is, revA == null ? revB.getFileFlags() : revA.getFileFlags()); tikhomirov@705: // XXX if presentState(null, fileOnlyInB), and use(InputStream) - i.e. tikhomirov@705: // resolution is to add file with supplied content - shall I put 'Merged', MergedFromP2 or 'Added' into dirstate? tikhomirov@705: if (revA == null && revB != null) { tikhomirov@705: dirstateBuilder.recordMergedFromP2(file); tikhomirov@705: } else { tikhomirov@705: dirstateBuilder.recordMerged(file, fw.fmode(), fw.mtime(), fw.bytesWritten()); tikhomirov@705: } tikhomirov@705: } else { tikhomirov@705: assert false; tikhomirov@705: } tikhomirov@704: } tikhomirov@704: tikhomirov@704: public void use(HgFileRevision rev) { tikhomirov@705: if (rev == null) { tikhomirov@705: throw new IllegalArgumentException(); tikhomirov@705: } tikhomirov@705: assert resolveContent == null; tikhomirov@705: assert resolveForget == null; tikhomirov@705: resolveUse = rev; tikhomirov@704: } tikhomirov@704: tikhomirov@705: public void use(InputStream content) throws IOException { tikhomirov@705: if (content == null) { tikhomirov@705: throw new IllegalArgumentException(); tikhomirov@705: } tikhomirov@705: assert resolveUse == null; tikhomirov@705: assert resolveForget == null; tikhomirov@705: try { tikhomirov@705: // cache new contents just to fail fast if there are troubles with content tikhomirov@705: final FileUtils fileUtils = new FileUtils(repo.getLog(), this); tikhomirov@705: resolveContent = fileUtils.createTempFile(); tikhomirov@705: fileUtils.write(content, resolveContent); tikhomirov@705: } finally { tikhomirov@705: content.close(); tikhomirov@705: } tikhomirov@705: // do not care deleting file in case of failure to allow analyze of the issue tikhomirov@705: } tikhomirov@705: tikhomirov@705: public void forget(HgFileRevision rev) { tikhomirov@705: if (rev == null) { tikhomirov@705: throw new IllegalArgumentException(); tikhomirov@705: } tikhomirov@705: if (rev != revA || rev != revB) { tikhomirov@705: throw new IllegalArgumentException("Can't forget revision which doesn't represent actual state in either merged trunk"); tikhomirov@705: } tikhomirov@705: assert resolveUse == null; tikhomirov@705: assert resolveContent == null; tikhomirov@705: resolveForget = rev; tikhomirov@704: } tikhomirov@704: tikhomirov@704: public void unresolved() { tikhomirov@705: resolveMarkUnresolved = true; tikhomirov@704: } tikhomirov@704: } tikhomirov@704: }