annotate hgext/largefiles/lfcommands.py @ 15392:d7bfbc92a1c0 stable
util: add a doctest for empty sha() calls

author    Matt Mackall <mpm@selenic.com>
date      Mon, 31 Oct 2011 15:41:39 -0500
parents   a53888685a6c
children  6a7e874390b0

# Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''High-level command function for lfconvert, plus the cmdtable.'''

import os
import shutil

from mercurial import util, match as match_, hg, node, context, error
from mercurial.i18n import _

import lfutil
import basestore

# -- Commands ----------------------------------------------------------

def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.
    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

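    # Illustrative shell usage (repository names here are hypothetical):
    # convert 'bigrepo' so that files of 10 MB or more become largefiles,
    # or convert a largefiles repository back to a plain one:
    #
    #   hg lfconvert --size 10 bigrepo bigrepo-lf
    #   hg lfconvert --to-normal bigrepo-lf plainrepo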
    if opts['to_normal']:
        tolfile = False
    else:
        tolfile = True
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise util.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise util.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    try:
        # Lock destination to prevent modification while it is being
        # converted to.  Don't need to lock src because we are just reading
        # from its history, which can't change.
        dst_lock = rdst.lock()

        # Get a list of all changesets in the source.  The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage.
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        revmap = {node.nullid: node.nullid}
        if tolfile:
            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.config(lfutil.longname, 'patterns', default=())
                if pats:
                    pats = pats.split(' ')
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _addchangeset(ui, rsrc, rdst, ctx, revmap)

            ui.progress(_('converting revisions'), None)
        success = True
    finally:
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
        dst_lock.release()

def _addchangeset(ui, rsrc, rdst, ctx, revmap):
    # Convert src parents to dst parents
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)

    # Generate list of changed files
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)

    def getfilectx(repo, memctx, f):
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = lfutil.splitstandin(renamed[0])

            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)
            ### TODO: What if the file is not cached?
            data = ''
            fd = None
            try:
                fd = open(path, 'rb')
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]
            data = fctx.data()
            if f == '.hgtags':
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                        name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)

    dstfiles = []
    for file in files:
        if lfutil.isstandin(file):
            dstfiles.append(lfutil.splitstandin(file))
        else:
            dstfiles.append(file)
    # Commit
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.dirstate.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    # Convert src parents to dst parents
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)

    # Generate list of changed files
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    if renamedlfile:
                        raise util.Abort(
                            _('renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                if 'l' in ctx.filectx(f).flags():
                    if renamed and renamed[0] in lfiles:
                        raise util.Abort(_('largefile %s becomes symlink') % f)

                # largefile was modified, update standins
                fullpath = rdst.wjoin(f)
                util.makedirs(os.path.dirname(fullpath))
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    fd = None
                    try:
                        fd = open(fullpath, 'wb')
                        fd.write(ctx[f].data())
                    finally:
                        if fd:
                            fd.close()
                    executable = 'x' in ctx[f].flags()
                    os.chmod(fullpath, lfutil.getmode(executable))
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

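            # The standin committed below contains just the largefile's sha1
            # hex digest plus a newline, which is why standins are read with
            # .data().strip() elsewhere in this file.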
            return context.memfilectx(f, lfiletohash[srcfname] + '\n', 'l' in
                fctx.flags(), 'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]

            data = fctx.data()
            if f == '.hgtags':
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                        name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)

    # Commit
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.dirstate.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

def _islfile(file, ctx, matcher, size):
    '''Return true if file should be considered a largefile, i.e.
    matcher matches it or it is larger than size.'''
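    # Illustrative example (values are hypothetical): with size=2 (MB), a
    # file whose first revision is 3 * 1024 * 1024 bytes is tracked as a
    # largefile; a 1 MB file is tracked as one only if the matcher matches it.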
    # never store special .hg* files as largefiles
    if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
        return False
    if matcher and matcher(file):
        return True
    try:
        return ctx.filectx(file).size() >= size * 1024 * 1024
    except error.LookupError:
        return False

def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

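    # Illustrative call (variable names are hypothetical): 'files' is a list
    # of sha1 hex strings naming largefile revisions, e.g.
    #   uploadlfiles(ui, rsrc, rdst, hashes)
    # Hashes already present in the destination store are skipped below.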
    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    at = 0
    files = filter(lambda h: not store.exists(h), files)
    for hash in files:
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(_('largefile %s missing from store'
                               ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
    ui.progress(_('uploading largefiles'), None)

def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every big file revision in the current changeset
    exists in the central store.  With --contents, also verify that
    the contents of each big file revision are correct (SHA-1 hash
    matches the revision ID).  With --all, check every changeset in
    this repository.'''
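    # Illustrative call (keyword values are only examples): check every
    # changeset and also re-hash each largefile's contents:
    #   verifylfiles(ui, repo, all=True, contents=True)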
    if all:
        # Pass a list to the function rather than an iterator because we know a
        # list will work.
        revs = range(len(repo))
    else:
        revs = ['.']

    store = basestore._openstore(repo)
    return store.verify(revs, contents=contents)

def cachelfiles(ui, repo, node):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing).  cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
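    # Illustrative call (result names are hypothetical): fetch whatever the
    # working-copy parent needs and report what could not be found:
    #   cached, missing = cachelfiles(ui, repo, '.')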
    lfiles = lfutil.listlfiles(repo, node)
    toget = []

    for lfile in lfiles:
        expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        # if it exists and its hash matches, it might have been locally
        # modified before updating and the user chose 'local'. in this case,
        # it will not be in any store, so don't look for it.
        if ((not os.path.exists(repo.wjoin(lfile)) or
             expectedhash != lfutil.hashfile(repo.wjoin(lfile))) and
            not lfutil.findfile(repo, expectedhash)):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret

    return ([], [])

def updatelfiles(ui, repo, filelist=None, printmessage=True):
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            lfiles = [f for f in lfiles if f in filelist]

        printed = False
        if printmessage and lfiles:
            ui.status(_('getting changed largefiles\n'))
            printed = True
        cachelfiles(ui, repo, '.')

        updated, removed = 0, 0
        for i in map(lambda f: _updatelfile(repo, lfdirstate, f), lfiles):
            # increment the appropriate counter according to _updatelfile's
            # return value
            updated += i > 0 and i or 0
            removed -= i < 0 and i or 0
            if printmessage and (removed or updated) and not printed:
                ui.status(_('getting changed largefiles\n'))
                printed = True

        lfdirstate.write()
        if printed and printmessage:
            ui.status(_('%d largefiles updated, %d removed\n') % (updated,
                removed))
    finally:
        wlock.release()

def _updatelfile(repo, lfdirstate, lfile):
    '''updates a single largefile and copies the state of its standin from
    the repository's dirstate to its state in the lfdirstate.

    returns 1 if the file was modified, -1 if the file was removed, 0 if the
    file was unchanged, and None if the needed largefile was missing from the
    cache.'''
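    # Return-value sketch (mirrors how updatelfiles() above tallies results):
    # 1 is counted as updated, -1 as removed; 0 and None contribute nothing.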
    ret = 0
    abslfile = repo.wjoin(lfile)
    absstandin = repo.wjoin(lfutil.standin(lfile))
    if os.path.exists(absstandin):
        if os.path.exists(absstandin+'.orig'):
            shutil.copyfile(abslfile, abslfile+'.orig')
        expecthash = lfutil.readstandin(repo, lfile)
        if (expecthash != '' and
            (not os.path.exists(abslfile) or
             expecthash != lfutil.hashfile(abslfile))):
            if not lfutil.copyfromcache(repo, expecthash, lfile):
                return None # don't try to set the mode or update the dirstate
            ret = 1
        mode = os.stat(absstandin).st_mode
        if mode != os.stat(abslfile).st_mode:
            os.chmod(abslfile, mode)
            ret = 1
    else:
        if os.path.exists(abslfile):
            os.unlink(abslfile)
            ret = -1
    state = repo.dirstate[lfutil.standin(lfile)]
    if state == 'n':
        lfdirstate.normal(lfile)
    elif state == 'r':
        lfdirstate.remove(lfile)
    elif state == 'a':
        lfdirstate.add(lfile)
    elif state == '?':
        lfdirstate.drop(lfile)
    return ret

# -- hg commands declarations ------------------------------------------------

cmdtable = {
    'lfconvert': (lfconvert,
                  [('s', 'size', '',
                    _('minimum size (MB) for files to be converted '
                      'as largefiles'),
                    'SIZE'),
                   ('', 'to-normal', False,
                    _('convert from a largefiles repo to a normal repo')),
                   ],
                  _('hg lfconvert SOURCE DEST [FILE ...]')),
    }
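# A minimal configuration sketch for the setup the lfconvert docstring refers
# to (section and key names are standard hgrc syntax; the size value is just
# an example):
#
#   [extensions]
#   largefiles =
#
#   [largefiles]
#   size = 10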