view hgext/infinitepush/bundleparts.py @ 43085:eef9a2d67051
py3: manually import pycompat.open into files that need it
We want to eliminate the source transformer. Currently it inserts
a `from mercurial.pycompat import ...` statement at the top of files
to alias some builtins.
This commit replaces the implicit import of `open` with an explicit
import in the files that need it, and changes the source transformer
to no longer import `open`.
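
For illustration only (the aliased names and the call site below are
examples, not the exact diff), the shift is from transformer-injected
aliasing to a spelled-out dependency:

    # Before: the source transformer silently prepended an aliasing import,
    # roughly of the form
    #   from mercurial.pycompat import delattr, getattr, open, setattr
    # so bare open() calls resolved to pycompat's version without any
    # visible import.

    # After: a module that actually calls open() declares it explicitly.
    from mercurial.pycompat import open

    def readfile(path):
        with open(path, b'rb') as fh:
            return fh.read()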
As part of this, we needed to bind `open` to an explicit module-level
name in the Python 2 code path of `pycompat` so the import works. (A
builtin that is only implicitly in scope is not a module attribute and
therefore cannot be imported from the module.)
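
A minimal sketch of the idea (not the real `pycompat` module, which is
considerably larger):

    import sys

    ispy3 = sys.version_info[0] >= 3

    if ispy3:
        # Python 3: pycompat provides its own open() here that tolerates
        # the bytes paths and modes used throughout Mercurial (details
        # omitted).
        pass
    else:
        # Python 2: the builtin is already suitable, but a builtin is
        # resolved through the builtins namespace rather than being an
        # attribute of this module, so it must be re-bound at module level
        # before `from mercurial.pycompat import open` can find it.
        open = open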
Differential Revision: https://phab.mercurial-scm.org/D7005
author   | Gregory Szorc <gregory.szorc@gmail.com>
date     | Sun, 06 Oct 2019 13:28:56 -0400
parents  | 687b865b95ad
children | 649d3ac37a12
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

from mercurial.i18n import _

from mercurial import (
    bundle2,
    changegroup,
    error,
    extensions,
    node as nodemod,
    revsetlang,
    util,
)

from . import common

isremotebooksenabled = common.isremotebooksenabled

scratchbranchparttype = b'b2x:infinitepush'


def getscratchbranchparts(repo, peer, outgoing, ui, bookmark):
    if not outgoing.missing:
        raise error.Abort(_(b'no commits to push'))

    if scratchbranchparttype not in bundle2.bundle2caps(peer):
        raise error.Abort(
            _(b'no server support for %r') % scratchbranchparttype
        )

    _validaterevset(
        repo, revsetlang.formatspec(b'%ln', outgoing.missing), bookmark
    )

    supportedversions = changegroup.supportedoutgoingversions(repo)
    # Explicitly avoid using '01' changegroup version in infinitepush to
    # support general delta
    supportedversions.discard(b'01')
    cgversion = min(supportedversions)
    _handlelfs(repo, outgoing.missing)
    cg = changegroup.makestream(repo, outgoing, cgversion, b'push')

    params = {}
    params[b'cgversion'] = cgversion
    if bookmark:
        params[b'bookmark'] = bookmark
        # 'prevbooknode' is necessary for pushkey reply part
        params[b'bookprevnode'] = b''
        bookmarks = repo._bookmarks
        if bookmark in bookmarks:
            params[b'bookprevnode'] = nodemod.hex(bookmarks[bookmark])

    # Do not send pushback bundle2 part with bookmarks if remotenames extension
    # is enabled. It will be handled manually in `_push()`
    if not isremotebooksenabled(ui):
        params[b'pushbackbookmarks'] = b'1'

    parts = []

    # .upper() marks this as a mandatory part: server will abort if there's no
    # handler
    parts.append(
        bundle2.bundlepart(
            scratchbranchparttype.upper(),
            advisoryparams=params.iteritems(),
            data=cg,
        )
    )

    return parts


def _validaterevset(repo, revset, bookmark):
    """Abort if the revs to be pushed aren't valid for a scratch branch."""
    if not repo.revs(revset):
        raise error.Abort(_(b'nothing to push'))
    if bookmark:
        # Allow bundle with many heads only if no bookmark is specified
        heads = repo.revs(b'heads(%r)', revset)
        if len(heads) > 1:
            raise error.Abort(
                _(b'cannot push more than one head to a scratch branch')
            )


def _handlelfs(repo, missing):
    '''Special case if lfs is enabled

    If lfs is enabled then we need to call prepush hook
    to make sure large files are uploaded to lfs
    '''
    try:
        lfsmod = extensions.find(b'lfs')
        lfsmod.wrapper.uploadblobsfromrevs(repo, missing)
    except KeyError:
        # Ignore if lfs extension is not enabled
        return


class copiedpart(object):
    """a copy of unbundlepart content that can be consumed later"""

    def __init__(self, part):
        # copy "public properties"
        self.type = part.type
        self.id = part.id
        self.mandatory = part.mandatory
        self.mandatoryparams = part.mandatoryparams
        self.advisoryparams = part.advisoryparams
        self.params = part.params
        self.mandatorykeys = part.mandatorykeys
        # copy the buffer
        self._io = util.stringio(part.read())

    def consume(self):
        return

    def read(self, size=None):
        if size is None:
            return self._io.read()
        else:
            return self._io.read(size)
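
For context, a client-side caller would assemble the parts produced above
into a bundle2 stream along these lines. This is a hypothetical sketch, not
the extension's actual `_push()` code: the helper name `_buildscratchbundle`
is invented, the names `repo`, `remote`, `outgoing`, and `bookmark` are
assumed to exist, and reply handling, pushkey parts, and error paths are
omitted.

    from mercurial import bundle2, util

    def _buildscratchbundle(repo, remote, outgoing, bookmark):
        # Collect the b2x:infinitepush parts for the outgoing changesets.
        parts = getscratchbranchparts(repo, remote, outgoing, repo.ui, bookmark)

        # Wrap them in a bundle2 bundler advertising the peer's capabilities,
        # then expose the result as a readable chunk stream for the wire.
        bundler = bundle2.bundle20(repo.ui, bundle2.bundle2caps(remote))
        for part in parts:
            bundler.addpart(part)
        return util.chunkbuffer(bundler.getchunks())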