comparison src/org/tmatesoft/hg/internal/CommitFacility.java @ 591:e447384f3771

CommitFacility as internal class; refactored infrastructure around internals (access to RevlogStream)
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Tue, 30 Apr 2013 18:55:42 +0200
parents src/org/tmatesoft/hg/repo/CommitFacility.java@41218d84842a
children c56edf42be64
/*
 * Copyright (c) 2013 TMate Software Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * For information on how to redistribute this software under
 * the terms of a license other than GNU General Public License
 * contact TMate Software at support@hg4j.com
 */
package org.tmatesoft.hg.internal;

import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;

import org.tmatesoft.hg.core.HgCommitCommand;
import org.tmatesoft.hg.core.HgIOException;
import org.tmatesoft.hg.core.HgRepositoryLockException;
import org.tmatesoft.hg.core.Nodeid;
import org.tmatesoft.hg.repo.HgChangelog;
import org.tmatesoft.hg.repo.HgDataFile;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.util.Pair;
import org.tmatesoft.hg.util.Path;
import org.tmatesoft.hg.util.LogFacility.Severity;

/**
 * WORK IN PROGRESS
 * Possible names: CommitObject, FutureCommit or PendingCommit
 * The only public API for now is {@link HgCommitCommand}.
 *
 * @author Artem Tikhomirov
 * @author TMate Software Ltd.
 */
@Experimental(reason="Work in progress")
public final class CommitFacility {
    private final Internals repo;
    private final int p1Commit, p2Commit;
    private Map<Path, Pair<HgDataFile, ByteDataSupplier>> files = new LinkedHashMap<Path, Pair<HgDataFile, ByteDataSupplier>>();
    private Set<Path> removals = new TreeSet<Path>();
    private String branch, user;

    public CommitFacility(Internals hgRepo, int parentCommit) {
        this(hgRepo, parentCommit, NO_REVISION);
    }

    public CommitFacility(Internals hgRepo, int parent1Commit, int parent2Commit) {
        repo = hgRepo;
        p1Commit = parent1Commit;
        p2Commit = parent2Commit;
        if (parent1Commit != NO_REVISION && parent1Commit == parent2Commit) {
            throw new IllegalArgumentException("Merging same revision is dubious");
        }
    }

    public boolean isMerge() {
        return p1Commit != NO_REVISION && p2Commit != NO_REVISION;
    }

    public void add(HgDataFile dataFile, ByteDataSupplier content) {
        if (content == null) {
            throw new IllegalArgumentException();
        }
        removals.remove(dataFile.getPath());
        files.put(dataFile.getPath(), new Pair<HgDataFile, ByteDataSupplier>(dataFile, content));
    }

    public void forget(HgDataFile dataFile) {
        files.remove(dataFile.getPath());
        removals.add(dataFile.getPath());
    }

    public void branch(String branchName) {
        branch = branchName;
    }

    public void user(String userName) {
        user = userName;
    }

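    /**
     * Writes a new revision to the revlog of each file registered via {@link #add(HgDataFile, ByteDataSupplier)},
     * records a new manifest revision and a changelog entry, then updates fncache (when in use) with newly
     * added files and brings dirstate up to the committed state.
     *
     * @return revision of the new changeset
     */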
    public Nodeid commit(String message) throws HgIOException, HgRepositoryLockException {
        final HgChangelog clog = repo.getRepo().getChangelog();
        final int clogRevisionIndex = clog.getRevisionCount();
        ManifestRevision c1Manifest = new ManifestRevision(null, null);
        ManifestRevision c2Manifest = new ManifestRevision(null, null);
        if (p1Commit != NO_REVISION) {
            repo.getRepo().getManifest().walk(p1Commit, p1Commit, c1Manifest);
        }
        if (p2Commit != NO_REVISION) {
            repo.getRepo().getManifest().walk(p2Commit, p2Commit, c2Manifest);
        }
        // Pair<Integer, Integer> manifestParents = getManifestParents();
        Pair<Integer, Integer> manifestParents = new Pair<Integer, Integer>(c1Manifest.revisionIndex(), c2Manifest.revisionIndex());
        TreeMap<Path, Nodeid> newManifestRevision = new TreeMap<Path, Nodeid>();
        HashMap<Path, Pair<Integer, Integer>> fileParents = new HashMap<Path, Pair<Integer,Integer>>();
        for (Path f : c1Manifest.files()) {
            HgDataFile df = repo.getRepo().getFileNode(f);
            Nodeid fileKnownRev1 = c1Manifest.nodeid(f), fileKnownRev2;
            final int fileRevIndex1 = df.getRevisionIndex(fileKnownRev1);
            final int fileRevIndex2;
            if ((fileKnownRev2 = c2Manifest.nodeid(f)) != null) {
                // merged files
                fileRevIndex2 = df.getRevisionIndex(fileKnownRev2);
            } else {
                fileRevIndex2 = NO_REVISION;
            }

            fileParents.put(f, new Pair<Integer, Integer>(fileRevIndex1, fileRevIndex2));
            newManifestRevision.put(f, fileKnownRev1);
        }
        //
        // Forget removed
        for (Path p : removals) {
            newManifestRevision.remove(p);
        }
        //
        // Register new/changed
        ArrayList<Path> newlyAddedFiles = new ArrayList<Path>();
        ArrayList<Path> touchInDirstate = new ArrayList<Path>();
        for (Pair<HgDataFile, ByteDataSupplier> e : files.values()) {
            HgDataFile df = e.first();
            Pair<Integer, Integer> fp = fileParents.get(df.getPath());
            if (fp == null) {
                // NEW FILE
                fp = new Pair<Integer, Integer>(NO_REVISION, NO_REVISION);
            }
            ByteDataSupplier bds = e.second();
            // FIXME quickfix, instead, pass ByteDataSupplier directly to RevlogStreamWriter
            ByteBuffer bb = ByteBuffer.allocate(2048);
            ByteArrayChannel bac = new ByteArrayChannel();
            while (bds.read(bb) != -1) {
                bb.flip();
                bac.write(bb);
                bb.clear();
            }
            RevlogStream contentStream;
            if (df.exists()) {
                contentStream = repo.getImplAccess().getStream(df);
            } else {
                contentStream = repo.createStoreFile(df.getPath());
                newlyAddedFiles.add(df.getPath());
                // FIXME df doesn't get df.content updated, and clients
                // that attempt to access the newly added file after commit would fail
                // (despite the fact the file is in there)
            }
            RevlogStreamWriter fileWriter = new RevlogStreamWriter(repo, contentStream);
            Nodeid fileRev = fileWriter.addRevision(bac.toArray(), clogRevisionIndex, fp.first(), fp.second());
            newManifestRevision.put(df.getPath(), fileRev);
            touchInDirstate.add(df.getPath());
        }
        //
        // Manifest
        final ManifestEntryBuilder manifestBuilder = new ManifestEntryBuilder();
        for (Map.Entry<Path, Nodeid> me : newManifestRevision.entrySet()) {
            manifestBuilder.add(me.getKey().toString(), me.getValue());
        }
        RevlogStreamWriter manifestWriter = new RevlogStreamWriter(repo, repo.getImplAccess().getManifestStream());
        Nodeid manifestRev = manifestWriter.addRevision(manifestBuilder.build(), clogRevisionIndex, manifestParents.first(), manifestParents.second());
        //
        // Changelog
        final ChangelogEntryBuilder changelogBuilder = new ChangelogEntryBuilder();
        changelogBuilder.setModified(files.keySet());
        changelogBuilder.branch(branch == null ? HgRepository.DEFAULT_BRANCH_NAME : branch);
        changelogBuilder.user(String.valueOf(user));
        byte[] clogContent = changelogBuilder.build(manifestRev, message);
        RevlogStreamWriter changelogWriter = new RevlogStreamWriter(repo, repo.getImplAccess().getChangelogStream());
        Nodeid changesetRev = changelogWriter.addRevision(clogContent, clogRevisionIndex, p1Commit, p2Commit);
        // FIXME move fncache update to an external facility, along with dirstate update
        if (!newlyAddedFiles.isEmpty() && repo.fncacheInUse()) {
            FNCacheFile fncache = new FNCacheFile(repo);
            for (Path p : newlyAddedFiles) {
                fncache.add(p);
            }
            try {
                fncache.write();
            } catch (IOException ex) {
                // see comment above for fncache.read()
                repo.getSessionContext().getLog().dump(getClass(), Severity.Error, ex, "Failed to write fncache, error ignored");
            }
        }
        // bring dirstate up to commit state
        final DirstateBuilder dirstateBuilder = new DirstateBuilder(repo);
        dirstateBuilder.fillFrom(new DirstateReader(repo, new Path.SimpleSource()));
        for (Path p : removals) {
            dirstateBuilder.recordRemoved(p);
        }
        for (Path p : touchInDirstate) {
            dirstateBuilder.recordUncertain(p);
        }
        dirstateBuilder.parents(changesetRev, Nodeid.NULL);
        dirstateBuilder.serialize();
        return changesetRev;
    }
/*
    private Pair<Integer, Integer> getManifestParents() {
        return new Pair<Integer, Integer>(extractManifestRevisionIndex(p1Commit), extractManifestRevisionIndex(p2Commit));
    }

    private int extractManifestRevisionIndex(int clogRevIndex) {
        if (clogRevIndex == NO_REVISION) {
            return NO_REVISION;
        }
        RawChangeset commitObject = repo.getChangelog().range(clogRevIndex, clogRevIndex).get(0);
        Nodeid manifestRev = commitObject.manifest();
        if (manifestRev.isNull()) {
            return NO_REVISION;
        }
        return repo.getManifest().getRevisionIndex(manifestRev);
    }
*/

    // Unlike DataAccess (which provides structured access), this one
    // deals with a plain sequence of bytes, for cases when there's no need for structured access to the data
    // FIXME java.nio.ReadableByteChannel or ByteStream/ByteSequence(read, length, reset)
    // SHALL be in line with util.ByteChannel, reading bytes from HgDataFile, preferably DataAccess#readBytes(BB) to match the API,
    // and a wrapper for ByteVector
    public interface ByteDataSupplier { // TODO see if the DataAccess visibility issue in HgCloneCommand can be resolved
        // FIXME needs a lifecycle, e.g. for a supplier that reads from the WC
        int read(ByteBuffer buf);
    }
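
    // A minimal in-memory ByteDataSupplier, sketched here purely to illustrate the read() contract above
    // (the class name and its presence here are illustrative assumptions): content is served from a byte[],
    // at most buf.remaining() bytes per call, and -1 is returned once the array is exhausted.
    public static class ByteArraySupplier implements ByteDataSupplier {
        private final byte[] data;
        private int pos = 0;

        public ByteArraySupplier(byte[] source) {
            data = source;
        }

        public int read(ByteBuffer buf) {
            if (pos >= data.length) {
                return -1; // no more content
            }
            int count = Math.min(buf.remaining(), data.length - pos);
            buf.put(data, pos, count);
            pos += count;
            return count;
        }
    }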

    public interface ByteDataConsumer {
        void write(ByteBuffer buf);
    }
}
240 }