comparison src/org/tmatesoft/hg/repo/CommitFacility.java @ 538:dd4f6311af52
Commit: first working version
author    Artem Tikhomirov <tikhomirov.artem@gmail.com>
date      Tue, 05 Feb 2013 22:30:21 +0100
parents
children  9edfd5a223b8
537:5a455624be4f | 538:dd4f6311af52
/*
 * Copyright (c) 2013 TMate Software Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * For information on how to redistribute this software under
 * the terms of a license other than GNU General Public License
 * contact TMate Software at support@hg4j.com
 */
package org.tmatesoft.hg.repo;

import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;

import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;

import org.tmatesoft.hg.core.HgRepositoryLockException;
import org.tmatesoft.hg.core.Nodeid;
import org.tmatesoft.hg.internal.ByteArrayChannel;
import org.tmatesoft.hg.internal.ChangelogEntryBuilder;
import org.tmatesoft.hg.internal.Experimental;
import org.tmatesoft.hg.internal.ManifestEntryBuilder;
import org.tmatesoft.hg.internal.ManifestRevision;
import org.tmatesoft.hg.internal.RevlogStreamWriter;
import org.tmatesoft.hg.util.Pair;
import org.tmatesoft.hg.util.Path;

/**
 * WORK IN PROGRESS
 *
 * @author Artem Tikhomirov
 * @author TMate Software Ltd.
 */
@Experimental(reason="Work in progress")
public class CommitFacility {
    private final HgRepository repo;
    private final int p1Commit, p2Commit;
    private Map<Path, Pair<HgDataFile, ByteDataSupplier>> files = new LinkedHashMap<Path, Pair<HgDataFile, ByteDataSupplier>>();


    public CommitFacility(HgRepository hgRepo, int parentCommit) {
        this(hgRepo, parentCommit, NO_REVISION);
    }

    public CommitFacility(HgRepository hgRepo, int parent1Commit, int parent2Commit) {
        repo = hgRepo;
        p1Commit = parent1Commit;
        p2Commit = parent2Commit;
        if (parent1Commit != NO_REVISION && parent1Commit == parent2Commit) {
            throw new IllegalArgumentException("Merging same revision is dubious");
        }
    }

    public boolean isMerge() {
        return p1Commit != NO_REVISION && p2Commit != NO_REVISION;
    }

    public void add(HgDataFile dataFile, ByteDataSupplier content) {
        files.put(dataFile.getPath(), new Pair<HgDataFile, ByteDataSupplier>(dataFile, content));
    }

    public Nodeid commit(String message) throws HgRepositoryLockException {

        final HgChangelog clog = repo.getChangelog();
        final int clogRevisionIndex = clog.getRevisionCount();
        ManifestRevision c1Manifest = new ManifestRevision(null, null);
        ManifestRevision c2Manifest = new ManifestRevision(null, null);
        if (p1Commit != NO_REVISION) {
            repo.getManifest().walk(p1Commit, p1Commit, c1Manifest);
        }
        if (p2Commit != NO_REVISION) {
            repo.getManifest().walk(p2Commit, p2Commit, c2Manifest);
        }
//      Pair<Integer, Integer> manifestParents = getManifestParents();
        Pair<Integer, Integer> manifestParents = new Pair<Integer, Integer>(c1Manifest.revisionIndex(), c2Manifest.revisionIndex());
        TreeMap<Path, Nodeid> newManifestRevision = new TreeMap<Path, Nodeid>();
        HashMap<Path, Pair<Integer, Integer>> fileParents = new HashMap<Path, Pair<Integer,Integer>>();
        for (Path f : c1Manifest.files()) {
            HgDataFile df = repo.getFileNode(f);
            Nodeid fileKnownRev = c1Manifest.nodeid(f);
            int fileRevIndex = df.getRevisionIndex(fileKnownRev);
            // FIXME merged files?!
            fileParents.put(f, new Pair<Integer, Integer>(fileRevIndex, NO_REVISION));
            newManifestRevision.put(f, fileKnownRev);
        }
        //
        // Files
        for (Pair<HgDataFile, ByteDataSupplier> e : files.values()) {
            HgDataFile df = e.first();
            Pair<Integer, Integer> fp = fileParents.get(df.getPath());
            if (fp == null) {
                // NEW FILE
                fp = new Pair<Integer, Integer>(NO_REVISION, NO_REVISION);
            }
            ByteDataSupplier bds = e.second();
            // FIXME quickfix, instead, pass ByteDataSupplier directly to RevlogStreamWriter
            ByteBuffer bb = ByteBuffer.allocate(2048);
            ByteArrayChannel bac = new ByteArrayChannel();
            while (bds.read(bb) != -1) {
                bb.flip();
                bac.write(bb);
                bb.clear();
            }
            RevlogStreamWriter fileWriter = new RevlogStreamWriter(repo.getSessionContext(), df.content);
            Nodeid fileRev = fileWriter.addRevision(bac.toArray(), clogRevisionIndex, fp.first(), fp.second());
            newManifestRevision.put(df.getPath(), fileRev);
        }
        //
        // Manifest
        final ManifestEntryBuilder manifestBuilder = new ManifestEntryBuilder();
        for (Map.Entry<Path, Nodeid> me : newManifestRevision.entrySet()) {
            manifestBuilder.add(me.getKey().toString(), me.getValue());
        }
        RevlogStreamWriter manifestWriter = new RevlogStreamWriter(repo.getSessionContext(), repo.getManifest().content);
        Nodeid manifestRev = manifestWriter.addRevision(manifestBuilder.build(), clogRevisionIndex, manifestParents.first(), manifestParents.second());
        //
        // Changelog
        final ChangelogEntryBuilder changelogBuilder = new ChangelogEntryBuilder();
        changelogBuilder.setModified(files.keySet());
        byte[] clogContent = changelogBuilder.build(manifestRev, message);
        RevlogStreamWriter changelogWriter = new RevlogStreamWriter(repo.getSessionContext(), clog.content);
        Nodeid changesetRev = changelogWriter.addRevision(clogContent, clogRevisionIndex, p1Commit, p2Commit);
        return changesetRev;
    }
/*
    private Pair<Integer, Integer> getManifestParents() {
        return new Pair<Integer, Integer>(extractManifestRevisionIndex(p1Commit), extractManifestRevisionIndex(p2Commit));
    }

    private int extractManifestRevisionIndex(int clogRevIndex) {
        if (clogRevIndex == NO_REVISION) {
            return NO_REVISION;
        }
        RawChangeset commitObject = repo.getChangelog().range(clogRevIndex, clogRevIndex).get(0);
        Nodeid manifestRev = commitObject.manifest();
        if (manifestRev.isNull()) {
            return NO_REVISION;
        }
        return repo.getManifest().getRevisionIndex(manifestRev);
    }
*/

    // unlike DataAccess (which provides structured access), this one
    // deals with a sequence of bytes, when there's no need in structure of the data
    public interface ByteDataSupplier { // TODO look if can resolve DataAccess in HgCloneCommand visibility issue
        int read(ByteBuffer buf);
    }

    public interface ByteDataConsumer {
        void write(ByteBuffer buf);
    }
}
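
For orientation, a minimal usage sketch of this revision's API (not part of the file above): open a repository, register replacement content for one file through an ad-hoc in-memory ByteDataSupplier, and commit on top of the current changelog tip. The repository path, the file name, and the wrapper class name are illustrative assumptions; CommitFacility, ByteDataSupplier, HgLookup, HgDataFile, HgRepository and Nodeid are existing hg4j types.

import java.nio.ByteBuffer;

import org.tmatesoft.hg.core.Nodeid;
import org.tmatesoft.hg.repo.CommitFacility;
import org.tmatesoft.hg.repo.HgDataFile;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRepository;

// Hypothetical sketch only: demonstrates the CommitFacility API of this revision.
public class CommitFacilitySketch {

    public static void main(String[] args) throws Exception {
        // Assumed repository location; any non-empty local repository would do.
        HgRepository repo = new HgLookup().detect("/path/to/repo");
        HgDataFile df = repo.getFileNode("file.txt"); // assumed file name
        final byte[] content = "new content\n".getBytes();
        // Parent of the new changeset: the current changelog tip.
        int tip = repo.getChangelog().getRevisionCount() - 1;
        CommitFacility cf = new CommitFacility(repo, tip);
        cf.add(df, new CommitFacility.ByteDataSupplier() {
            private boolean consumed = false;

            public int read(ByteBuffer buf) {
                if (consumed) {
                    return -1; // no more data
                }
                buf.put(content);
                consumed = true;
                return content.length;
            }
        });
        Nodeid commitRev = cf.commit("sample commit message");
        System.out.println(commitRev);
    }
}

Note this early version writes the file, manifest and changelog revlogs only; it does not touch the dirstate or working directory, so the sketch is meant for a clean repository you can afford to experiment with.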