Mercurial > hg
comparison mercurial/archival.py @ 43077:687b865b95ad
formatting: byteify all mercurial/ and hgext/ string literals
Done with
python3.7 contrib/byteify-strings.py -i $(hg files 'set:mercurial/**.py - mercurial/thirdparty/** + hgext/**.py - hgext/fsmonitor/pywatchman/** - mercurial/__init__.py')
black -l 80 -t py33 -S $(hg files 'set:**.py - mercurial/thirdparty/** - "contrib/python-zstandard/**" - hgext/fsmonitor/pywatchman/**')
# skip-blame mass-reformatting only
Differential Revision: https://phab.mercurial-scm.org/D6972
author | Augie Fackler <augie@google.com> |
---|---|
date | Sun, 06 Oct 2019 09:48:39 -0400 |
parents | 2372284d9457 |
children | eef9a2d67051 |
comparison legend: equal | deleted | inserted | replaced
43076:2372284d9457 | 43077:687b865b95ad |
---|---|
41 | 41 |
42 if prefix: | 42 if prefix: |
43 prefix = util.normpath(prefix) | 43 prefix = util.normpath(prefix) |
44 else: | 44 else: |
45 if not isinstance(dest, bytes): | 45 if not isinstance(dest, bytes): |
46 raise ValueError('dest must be string if no prefix') | 46 raise ValueError(b'dest must be string if no prefix') |
47 prefix = os.path.basename(dest) | 47 prefix = os.path.basename(dest) |
48 lower = prefix.lower() | 48 lower = prefix.lower() |
49 for sfx in exts.get(kind, []): | 49 for sfx in exts.get(kind, []): |
50 if lower.endswith(sfx): | 50 if lower.endswith(sfx): |
51 prefix = prefix[: -len(sfx)] | 51 prefix = prefix[: -len(sfx)] |
52 break | 52 break |
53 lpfx = os.path.normpath(util.localpath(prefix)) | 53 lpfx = os.path.normpath(util.localpath(prefix)) |
54 prefix = util.pconvert(lpfx) | 54 prefix = util.pconvert(lpfx) |
55 if not prefix.endswith('/'): | 55 if not prefix.endswith(b'/'): |
56 prefix += '/' | 56 prefix += b'/' |
57 # Drop the leading '.' path component if present, so Windows can read the | 57 # Drop the leading '.' path component if present, so Windows can read the |
58 # zip files (issue4634) | 58 # zip files (issue4634) |
59 if prefix.startswith('./'): | 59 if prefix.startswith(b'./'): |
60 prefix = prefix[2:] | 60 prefix = prefix[2:] |
61 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix: | 61 if prefix.startswith(b'../') or os.path.isabs(lpfx) or b'/../' in prefix: |
62 raise error.Abort(_('archive prefix contains illegal components')) | 62 raise error.Abort(_(b'archive prefix contains illegal components')) |
63 return prefix | 63 return prefix |
64 | 64 |
65 | 65 |
66 exts = { | 66 exts = { |
67 'tar': ['.tar'], | 67 b'tar': [b'.tar'], |
68 'tbz2': ['.tbz2', '.tar.bz2'], | 68 b'tbz2': [b'.tbz2', b'.tar.bz2'], |
69 'tgz': ['.tgz', '.tar.gz'], | 69 b'tgz': [b'.tgz', b'.tar.gz'], |
70 'zip': ['.zip'], | 70 b'zip': [b'.zip'], |
71 'txz': ['.txz', '.tar.xz'], | 71 b'txz': [b'.txz', b'.tar.xz'], |
72 } | 72 } |
73 | 73 |
74 | 74 |
75 def guesskind(dest): | 75 def guesskind(dest): |
76 for kind, extensions in exts.iteritems(): | 76 for kind, extensions in exts.iteritems(): |
106 def buildmetadata(ctx): | 106 def buildmetadata(ctx): |
107 '''build content of .hg_archival.txt''' | 107 '''build content of .hg_archival.txt''' |
108 repo = ctx.repo() | 108 repo = ctx.repo() |
109 | 109 |
110 opts = { | 110 opts = { |
111 'template': repo.ui.config( | 111 b'template': repo.ui.config( |
112 'experimental', 'archivemetatemplate', _defaultmetatemplate | 112 b'experimental', b'archivemetatemplate', _defaultmetatemplate |
113 ) | 113 ) |
114 } | 114 } |
115 | 115 |
116 out = util.stringio() | 116 out = util.stringio() |
117 | 117 |
118 fm = formatter.formatter(repo.ui, out, 'archive', opts) | 118 fm = formatter.formatter(repo.ui, out, b'archive', opts) |
119 fm.startitem() | 119 fm.startitem() |
120 fm.context(ctx=ctx) | 120 fm.context(ctx=ctx) |
121 fm.data(root=_rootctx(repo).hex()) | 121 fm.data(root=_rootctx(repo).hex()) |
122 | 122 |
123 if ctx.rev() is None: | 123 if ctx.rev() is None: |
124 dirty = '' | 124 dirty = b'' |
125 if ctx.dirty(missing=True): | 125 if ctx.dirty(missing=True): |
126 dirty = '+' | 126 dirty = b'+' |
127 fm.data(dirty=dirty) | 127 fm.data(dirty=dirty) |
128 fm.end() | 128 fm.end() |
129 | 129 |
130 return out.getvalue() | 130 return out.getvalue() |
131 | 131 |
144 else: | 144 else: |
145 self.timestamp = timestamp | 145 self.timestamp = timestamp |
146 gzip.GzipFile.__init__(self, *args, **kw) | 146 gzip.GzipFile.__init__(self, *args, **kw) |
147 | 147 |
148 def _write_gzip_header(self): | 148 def _write_gzip_header(self): |
149 self.fileobj.write('\037\213') # magic header | 149 self.fileobj.write(b'\037\213') # magic header |
150 self.fileobj.write('\010') # compression method | 150 self.fileobj.write(b'\010') # compression method |
151 fname = self.name | 151 fname = self.name |
152 if fname and fname.endswith('.gz'): | 152 if fname and fname.endswith(b'.gz'): |
153 fname = fname[:-3] | 153 fname = fname[:-3] |
154 flags = 0 | 154 flags = 0 |
155 if fname: | 155 if fname: |
156 flags = gzip.FNAME | 156 flags = gzip.FNAME |
157 self.fileobj.write(pycompat.bytechr(flags)) | 157 self.fileobj.write(pycompat.bytechr(flags)) |
158 gzip.write32u(self.fileobj, int(self.timestamp)) | 158 gzip.write32u(self.fileobj, int(self.timestamp)) |
159 self.fileobj.write('\002') | 159 self.fileobj.write(b'\002') |
160 self.fileobj.write('\377') | 160 self.fileobj.write(b'\377') |
161 if fname: | 161 if fname: |
162 self.fileobj.write(fname + '\000') | 162 self.fileobj.write(fname + b'\000') |
163 | 163 |
164 def __init__(self, dest, mtime, kind=''): | 164 def __init__(self, dest, mtime, kind=b''): |
165 self.mtime = mtime | 165 self.mtime = mtime |
166 self.fileobj = None | 166 self.fileobj = None |
167 | 167 |
168 def taropen(mode, name='', fileobj=None): | 168 def taropen(mode, name=b'', fileobj=None): |
169 if kind == 'gz': | 169 if kind == b'gz': |
170 mode = mode[0:1] | 170 mode = mode[0:1] |
171 if not fileobj: | 171 if not fileobj: |
172 fileobj = open(name, mode + 'b') | 172 fileobj = open(name, mode + b'b') |
173 gzfileobj = self.GzipFileWithTime( | 173 gzfileobj = self.GzipFileWithTime( |
174 name, | 174 name, |
175 pycompat.sysstr(mode + 'b'), | 175 pycompat.sysstr(mode + b'b'), |
176 zlib.Z_BEST_COMPRESSION, | 176 zlib.Z_BEST_COMPRESSION, |
177 fileobj, | 177 fileobj, |
178 timestamp=mtime, | 178 timestamp=mtime, |
179 ) | 179 ) |
180 self.fileobj = gzfileobj | 180 self.fileobj = gzfileobj |
183 ) | 183 ) |
184 else: | 184 else: |
185 return tarfile.open(name, pycompat.sysstr(mode + kind), fileobj) | 185 return tarfile.open(name, pycompat.sysstr(mode + kind), fileobj) |
186 | 186 |
187 if isinstance(dest, bytes): | 187 if isinstance(dest, bytes): |
188 self.z = taropen('w:', name=dest) | 188 self.z = taropen(b'w:', name=dest) |
189 else: | 189 else: |
190 self.z = taropen('w|', fileobj=dest) | 190 self.z = taropen(b'w|', fileobj=dest) |
191 | 191 |
192 def addfile(self, name, mode, islink, data): | 192 def addfile(self, name, mode, islink, data): |
193 name = pycompat.fsdecode(name) | 193 name = pycompat.fsdecode(name) |
194 i = tarfile.TarInfo(name) | 194 i = tarfile.TarInfo(name) |
195 i.mtime = self.mtime | 195 i.mtime = self.mtime |
244 i.external_attr = (mode | ftype) << 16 | 244 i.external_attr = (mode | ftype) << 16 |
245 # add "extended-timestamp" extra block, because zip archives | 245 # add "extended-timestamp" extra block, because zip archives |
246 # without this will be extracted with unexpected timestamp, | 246 # without this will be extracted with unexpected timestamp, |
247 # if TZ is not configured as GMT | 247 # if TZ is not configured as GMT |
248 i.extra += struct.pack( | 248 i.extra += struct.pack( |
249 '<hhBl', | 249 b'<hhBl', |
250 0x5455, # block type: "extended-timestamp" | 250 0x5455, # block type: "extended-timestamp" |
251 1 + 4, # size of this block | 251 1 + 4, # size of this block |
252 1, # "modification time is present" | 252 1, # "modification time is present" |
253 int(self.mtime), | 253 int(self.mtime), |
254 ) # last modification (UTC) | 254 ) # last modification (UTC) |
268 | 268 |
269 def addfile(self, name, mode, islink, data): | 269 def addfile(self, name, mode, islink, data): |
270 if islink: | 270 if islink: |
271 self.opener.symlink(data, name) | 271 self.opener.symlink(data, name) |
272 return | 272 return |
273 f = self.opener(name, "w", atomictemp=False) | 273 f = self.opener(name, b"w", atomictemp=False) |
274 f.write(data) | 274 f.write(data) |
275 f.close() | 275 f.close() |
276 destfile = os.path.join(self.basedir, name) | 276 destfile = os.path.join(self.basedir, name) |
277 os.chmod(destfile, mode) | 277 os.chmod(destfile, mode) |
278 if self.mtime is not None: | 278 if self.mtime is not None: |
281 def done(self): | 281 def done(self): |
282 pass | 282 pass |
283 | 283 |
284 | 284 |
285 archivers = { | 285 archivers = { |
286 'files': fileit, | 286 b'files': fileit, |
287 'tar': tarit, | 287 b'tar': tarit, |
288 'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'), | 288 b'tbz2': lambda name, mtime: tarit(name, mtime, b'bz2'), |
289 'tgz': lambda name, mtime: tarit(name, mtime, 'gz'), | 289 b'tgz': lambda name, mtime: tarit(name, mtime, b'gz'), |
290 'txz': lambda name, mtime: tarit(name, mtime, 'xz'), | 290 b'txz': lambda name, mtime: tarit(name, mtime, b'xz'), |
291 'uzip': lambda name, mtime: zipit(name, mtime, False), | 291 b'uzip': lambda name, mtime: zipit(name, mtime, False), |
292 'zip': zipit, | 292 b'zip': zipit, |
293 } | 293 } |
294 | 294 |
295 | 295 |
296 def archive( | 296 def archive( |
297 repo, | 297 repo, |
298 dest, | 298 dest, |
299 node, | 299 node, |
300 kind, | 300 kind, |
301 decode=True, | 301 decode=True, |
302 match=None, | 302 match=None, |
303 prefix='', | 303 prefix=b'', |
304 mtime=None, | 304 mtime=None, |
305 subrepos=False, | 305 subrepos=False, |
306 ): | 306 ): |
307 '''create archive of repo as it was at node. | 307 '''create archive of repo as it was at node. |
308 | 308 |
321 mtime is the modified time, in seconds, or None to use the changeset time. | 321 mtime is the modified time, in seconds, or None to use the changeset time. |
322 | 322 |
323 subrepos tells whether to include subrepos. | 323 subrepos tells whether to include subrepos. |
324 ''' | 324 ''' |
325 | 325 |
326 if kind == 'txz' and not pycompat.ispy3: | 326 if kind == b'txz' and not pycompat.ispy3: |
327 raise error.Abort(_('xz compression is only available in Python 3')) | 327 raise error.Abort(_(b'xz compression is only available in Python 3')) |
328 | 328 |
329 if kind == 'files': | 329 if kind == b'files': |
330 if prefix: | 330 if prefix: |
331 raise error.Abort(_('cannot give prefix when archiving to files')) | 331 raise error.Abort(_(b'cannot give prefix when archiving to files')) |
332 else: | 332 else: |
333 prefix = tidyprefix(dest, kind, prefix) | 333 prefix = tidyprefix(dest, kind, prefix) |
334 | 334 |
335 def write(name, mode, islink, getdata): | 335 def write(name, mode, islink, getdata): |
336 data = getdata() | 336 data = getdata() |
337 if decode: | 337 if decode: |
338 data = repo.wwritedata(name, data) | 338 data = repo.wwritedata(name, data) |
339 archiver.addfile(prefix + name, mode, islink, data) | 339 archiver.addfile(prefix + name, mode, islink, data) |
340 | 340 |
341 if kind not in archivers: | 341 if kind not in archivers: |
342 raise error.Abort(_("unknown archive type '%s'") % kind) | 342 raise error.Abort(_(b"unknown archive type '%s'") % kind) |
343 | 343 |
344 ctx = repo[node] | 344 ctx = repo[node] |
345 archiver = archivers[kind](dest, mtime or ctx.date()[0]) | 345 archiver = archivers[kind](dest, mtime or ctx.date()[0]) |
346 | 346 |
347 if not match: | 347 if not match: |
348 match = scmutil.matchall(repo) | 348 match = scmutil.matchall(repo) |
349 | 349 |
350 if repo.ui.configbool("ui", "archivemeta"): | 350 if repo.ui.configbool(b"ui", b"archivemeta"): |
351 name = '.hg_archival.txt' | 351 name = b'.hg_archival.txt' |
352 if match(name): | 352 if match(name): |
353 write(name, 0o644, False, lambda: buildmetadata(ctx)) | 353 write(name, 0o644, False, lambda: buildmetadata(ctx)) |
354 | 354 |
355 files = [f for f in ctx.manifest().matches(match)] | 355 files = [f for f in ctx.manifest().matches(match)] |
356 total = len(files) | 356 total = len(files) |
358 files.sort() | 358 files.sort() |
359 scmutil.prefetchfiles( | 359 scmutil.prefetchfiles( |
360 repo, [ctx.rev()], scmutil.matchfiles(repo, files) | 360 repo, [ctx.rev()], scmutil.matchfiles(repo, files) |
361 ) | 361 ) |
362 progress = repo.ui.makeprogress( | 362 progress = repo.ui.makeprogress( |
363 _('archiving'), unit=_('files'), total=total | 363 _(b'archiving'), unit=_(b'files'), total=total |
364 ) | 364 ) |
365 progress.update(0) | 365 progress.update(0) |
366 for f in files: | 366 for f in files: |
367 ff = ctx.flags(f) | 367 ff = ctx.flags(f) |
368 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, ctx[f].data) | 368 write(f, b'x' in ff and 0o755 or 0o644, b'l' in ff, ctx[f].data) |
369 progress.increment(item=f) | 369 progress.increment(item=f) |
370 progress.complete() | 370 progress.complete() |
371 | 371 |
372 if subrepos: | 372 if subrepos: |
373 for subpath in sorted(ctx.substate): | 373 for subpath in sorted(ctx.substate): |
374 sub = ctx.workingsub(subpath) | 374 sub = ctx.workingsub(subpath) |
375 submatch = matchmod.subdirmatcher(subpath, match) | 375 submatch = matchmod.subdirmatcher(subpath, match) |
376 subprefix = prefix + subpath + '/' | 376 subprefix = prefix + subpath + b'/' |
377 total += sub.archive(archiver, subprefix, submatch, decode) | 377 total += sub.archive(archiver, subprefix, submatch, decode) |
378 | 378 |
379 if total == 0: | 379 if total == 0: |
380 raise error.Abort(_('no files match the archive pattern')) | 380 raise error.Abort(_(b'no files match the archive pattern')) |
381 | 381 |
382 archiver.done() | 382 archiver.done() |
383 return total | 383 return total |