author      Greg Ward <greg@gerg.ca>
date        Thu, 13 Oct 2011 20:24:29 -0400
changeset   15253:67d010779907
parent      15252:6e809bb4f969
child       15254:dd03d3a9f888
permissions -rw-r--r--
# Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''High-level command function for lfconvert, plus the cmdtable.'''

import os
import shutil

from mercurial import util, match as match_, hg, node, context, error
from mercurial.i18n import _

import lfutil
import basestore

# -- Commands ----------------------------------------------------------

def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --tonormal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

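    # Illustrative usage; the repository paths and the glob pattern below are
    # hypothetical, but --size and --tonormal match the cmdtable declaration
    # at the end of this module:
    #
    #   hg lfconvert --size 10 bigrepo bigrepo-lf          # size threshold only
    #   hg lfconvert bigrepo bigrepo-lf '**.zip'           # also match a pattern
    #   hg lfconvert --tonormal bigrepo-lf bigrepo-plain   # convert back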
|
    if opts['tonormal']:
        tolfile = False
    else:
        tolfile = True
    size = lfutil.getminsize(ui, True, opts.get('size'), default=None)
    try:
        rsrc = hg.repository(ui, src)
        if not rsrc.local():
            raise util.Abort(_('%s is not a local Mercurial repo') % src)
    except error.RepoError, err:
        ui.traceback()
        raise util.Abort(err.args[0])
    if os.path.exists(dest):
        if not os.path.isdir(dest):
            raise util.Abort(_('destination %s already exists') % dest)
        elif os.listdir(dest):
            raise util.Abort(_('destination %s is not empty') % dest)
    try:
        ui.status(_('initializing destination %s\n') % dest)
        rdst = hg.repository(ui, dest, create=True)
        if not rdst.local():
            raise util.Abort(_('%s is not a local Mercurial repo') % dest)
    except error.RepoError:
        ui.traceback()
        raise util.Abort(_('%s is not a repo') % dest)

    success = False
    try:
        # Lock destination to prevent modification while it is being
        # converted.  Don't need to lock src because we are just reading
        # from its history, which can't change.
        dst_lock = rdst.lock()

        # Get a list of all changesets in the source.  The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage.
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
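        # revmap maps each source changeset node to the node of its converted
        # counterpart in dest; seeding it with nullid lets the parents of root
        # changesets resolve correctly below.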
|
        revmap = {node.nullid: node.nullid}
        if tolfile:
            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.config(lfutil.longname, 'patterns', default=())
                if pats:
                    pats = pats.split(' ')
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

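            # remove the standins and plain largefile copies that the
            # conversion wrote into dest's working directory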
            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

        else:
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _addchangeset(ui, rsrc, rdst, ctx, revmap)

            ui.progress(_('converting revisions'), None)
        success = True
    finally:
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
        dst_lock.release()

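# Convert one changeset of a largefiles repository back into normal form:
# each largefile standin is replaced by the real file content, looked up in
# the local largefile store via lfutil.findfile().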
def _addchangeset(ui, rsrc, rdst, ctx, revmap):
    # Convert src parents to dst parents
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)

    # Generate list of changed files
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
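        # for a merge, also include files present in a parent but dropped from
        # the merge result, and files whose version in the merge differs from
        # either parent's version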
|
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)

    def getfilectx(repo, memctx, f):
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = lfutil.splitstandin(renamed[0])

            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)
            ### TODO: What if the file is not cached?
            data = ''
            fd = None
            try:
                fd = open(path, 'rb')
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]
            data = fctx.data()
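            # each .hgtags line is '<40-digit hex node> <tag name>'; rewrite
            # the node through revmap so tags point at the converted
            # changesets in dest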
|
            if f == '.hgtags':
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                                                name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)

    dstfiles = []
    for file in files:
        if lfutil.isstandin(file):
            dstfiles.append(lfutil.splitstandin(file))
        else:
            dstfiles.append(file)
    # Commit
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.dirstate.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

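# Convert one changeset of a normal repository into its largefiles form:
# files that qualify (by pattern, by size, or because a rename/copy source
# was already a largefile) are committed as standins holding the SHA-1 of
# their content, and lfiletohash records the hash last written for each one.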
def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    # Convert src parents to dst parents
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)

    # Generate list of changed files
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the lfileness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    if renamedlfile:
                        raise util.Abort(
                            _('Renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # lfile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                if 'l' in ctx.filectx(f).flags():
                    if renamed and renamed[0] in lfiles:
                        raise util.Abort(_('largefile %s becomes symlink') % f)

                # lfile was modified, update standins
                fullpath = rdst.wjoin(f)
                lfutil.createdir(os.path.dirname(fullpath))
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    fd = None
                    try:
                        fd = open(fullpath, 'wb')
                        fd.write(ctx[f].data())
                    finally:
                        if fd:
                            fd.close()
                    executable = 'x' in ctx[f].flags()
                    os.chmod(fullpath, lfutil.getmode(executable))
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                # standin is always a lfile because lfileness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(f, lfiletohash[srcfname], 'l' in
                fctx.flags(), 'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]

            data = fctx.data()
            if f == '.hgtags':
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                                                name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)

    # Commit
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.dirstate.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

def _islfile(file, ctx, matcher, size):
    '''Return true if file should be considered a largefile, i.e.
    matcher matches it or it is larger than size.'''
    # never store special .hg* files as largefiles
    if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
        return False
    if matcher and matcher(file):
        return True
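    # otherwise decide by size: ``size`` is in megabytes, while
    # filectx.size() returns bytes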
|
    try:
        return ctx.filectx(file).size() >= size * 1024 * 1024
    except error.LookupError:
        return False

def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    # Don't upload locally. All largefiles are in the system wide cache
    # so the other repo can just get them from there.
    if not files or rdst.local():
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    at = 0
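    # only transfer largefiles that the target store does not already have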
|
    files = filter(lambda h: not store.exists(h), files)
    for hash in files:
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(_('largefile %s missing from store'
                               ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
    ui.progress(_('uploading largefiles'), None)

def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every big file revision in the current changeset
    exists in the central store. With --contents, also verify that
    the contents of each big file revision are correct (SHA-1 hash
    matches the revision ID). With --all, check every changeset in
    this repository.'''
    if all:
        # Pass a list to the function rather than an iterator because we know a
        # list will work.
        revs = range(len(repo))
    else:
        revs = ['.']

    store = basestore._openstore(repo)
    return store.verify(revs, contents=contents)

def cachelfiles(ui, repo, node):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing). cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    toget = []

    for lfile in lfiles:
        expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        # if it exists and its hash matches, it might have been locally
        # modified before updating and the user chose 'local'. in this case,
        # it will not be in any store, so don't look for it.
        if (not os.path.exists(repo.wjoin(lfile)) \
            or expectedhash != lfutil.hashfile(repo.wjoin(lfile))) and \
            not lfutil.findfile(repo, expectedhash):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret

    return ([], [])

def updatelfiles(ui, repo, filelist=None, printmessage=True):
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            lfiles = [f for f in lfiles if f in filelist]

        printed = False
        if printmessage and lfiles:
            ui.status(_('getting changed largefiles\n'))
            printed = True
            cachelfiles(ui, repo, '.')

        updated, removed = 0, 0
        for i in map(lambda f: _updatelfile(repo, lfdirstate, f), lfiles):
            # increment the appropriate counter according to _updatelfile's
            # return value
            updated += i > 0 and i or 0
            removed -= i < 0 and i or 0
            if printmessage and (removed or updated) and not printed:
                ui.status(_('getting changed largefiles\n'))
                printed = True

        lfdirstate.write()
        if printed and printmessage:
            ui.status(_('%d largefiles updated, %d removed\n') % (updated,
                removed))
    finally:
        wlock.release()

def _updatelfile(repo, lfdirstate, lfile):
    '''updates a single largefile and copies the state of its standin from
    the repository's dirstate to its state in the lfdirstate.

    returns 1 if the file was modified, -1 if the file was removed, 0 if the
    file was unchanged, and None if the needed largefile was missing from the
    cache.'''
    ret = 0
    abslfile = repo.wjoin(lfile)
    absstandin = repo.wjoin(lfutil.standin(lfile))
    if os.path.exists(absstandin):
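        # the standin has a .orig backup (typically left by a merge or
        # revert); keep a matching backup of the largefile itself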
|
        if os.path.exists(absstandin+'.orig'):
            shutil.copyfile(abslfile, abslfile+'.orig')
        expecthash = lfutil.readstandin(repo, lfile)
        if expecthash != '' and \
           (not os.path.exists(abslfile) or \
           expecthash != lfutil.hashfile(abslfile)):
            if not lfutil.copyfromcache(repo, expecthash, lfile):
                return None # don't try to set the mode or update the dirstate
            ret = 1
        mode = os.stat(absstandin).st_mode
        if mode != os.stat(abslfile).st_mode:
            os.chmod(abslfile, mode)
            ret = 1
    else:
        if os.path.exists(abslfile):
            os.unlink(abslfile)
            ret = -1
    state = repo.dirstate[lfutil.standin(lfile)]
    if state == 'n':
        lfdirstate.normal(lfile)
    elif state == 'r':
        lfdirstate.remove(lfile)
    elif state == 'a':
        lfdirstate.add(lfile)
    elif state == '?':
        lfdirstate.drop(lfile)
    return ret

# -- hg commands declarations ------------------------------------------------


cmdtable = {
    'lfconvert': (lfconvert,
                  [('s', 'size', '',
                    _('minimum size (MB) for files to be converted '
                      'as largefiles'),
                    'SIZE'),
                   ('', 'tonormal', False,
                    _('convert from a largefiles repo to a normal repo')),
                  ],
                  _('hg lfconvert SOURCE DEST [FILE ...]')),
    }