tests/phabricator/phabread-multi-drev.json
author Manuel Jacob <me@manueljacob.de>
Wed, 15 Jul 2020 11:38:54 +0200
changeset 45143 5631b0116374
parent 44579 a7f8c657a3f0
permissions -rw-r--r--
discovery: fix docstring of `outgoing` class

Also, introduce a more correct name `ancestorsof` for what was named
`missingheads` before. For now, we just forward `ancestorsof` to
`missingheads` until all users are changed.

There were some mistakes in the old docstring / name:

* `missingheads` (new name: `ancestorsof`) contains the revs whose ancestors
  are included in the outgoing operation. It may contain non-head revs and
  revs which are already on the remote, so the name "missingheads" is wrong
  in two ways.
* `missing` contains only ancestors of `missingheads`, so not *all* nodes
  present in local but not in remote.
* `common` might not contain all common revs, e.g. not some that are not an
  ancestor of `missingheads`.

It seems like the misleading name has fostered an actual bug (issue6372),
where `outgoing.missingheads` was used assuming that it contains the heads
of the missing changesets.

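The corrected semantics are easiest to see on a small example. The following
is a minimal, self-contained sketch (a hypothetical toy DAG and helper, not
Mercurial's actual discovery code) showing why `ancestorsof` may contain
non-head revs that the remote already has, why `missing` is limited to
ancestors of `ancestorsof`, and why `common` need not contain every common
rev::

    # Toy DAG: rev -> list of parent revs (hypothetical example data).
    PARENTS = {
        0: [],
        1: [0],
        2: [1],
        3: [1],   # local head we want to push
        4: [0],   # known to both sides, but not an ancestor of `ancestorsof`
        5: [0],   # local-only, but not an ancestor of `ancestorsof`
    }

    def ancestors(revs):
        """Return `revs` plus all of their ancestors."""
        seen, stack = set(), list(revs)
        while stack:
            rev = stack.pop()
            if rev not in seen:
                seen.add(rev)
                stack.extend(PARENTS[rev])
        return seen

    remoterevs = {0, 1, 2, 4}    # what the remote already has
    ancestorsof = {2, 3}         # revs whose ancestors the operation covers;
                                 # 2 is not a head and is already remote, so
                                 # "missingheads" was a misleading name

    # `missing` only contains ancestors of `ancestorsof` that are absent from
    # the remote; rev 5 is local-only yet excluded.
    missing = ancestors(ancestorsof) - remoterevs
    assert missing == {3}

    # `common` misses rev 4 even though both sides have it, because it is not
    # an ancestor of `ancestorsof`.
    common = ancestors(ancestorsof) & remoterevs
    assert common == {0, 1, 2}
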
{
    "version": 1, 
    "interactions": [
        {
            "response": {
                "body": {
                    "string": "{\"result\":[{\"id\":\"8207\",\"phid\":\"PHID-DREV-2cgovej5wkjco3xjcqta\",\"title\":\"phabricator: pass ui instead of repo to `userphids()`\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8207\",\"dateCreated\":\"1583259903\",\"dateModified\":\"1583348836\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":7,\"lines.removed\":4,\"wasAcceptedBeforeClose\":true},\"branch\":null,\"summary\":\"Also not a repository operation.\",\"testPlan\":\"\",\"lineCount\":\"11\",\"activeDiffPHID\":\"PHID-DIFF-wzbsydozxy3nv2k6q4nd\",\"diffs\":[\"20443\",\"20423\"],\"commits\":[\"PHID-CMIT-o75v5xkiwt7t4qsjdhhw\",\"PHID-CMIT-4od7afhqygglq77yjjbr\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-34jnztnonbr4lhwuybwl\":\"PHID-USER-34jnztnonbr4lhwuybwl\"},\"ccs\":[\"PHID-USER-5iy6mkoveguhm2zthvww\",\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-gtbyd4t7mjnm4i3erun5\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null},{\"id\":\"8206\",\"phid\":\"PHID-DREV-gtbyd4t7mjnm4i3erun5\",\"title\":\"phabricator: pass ui instead of repo to `querydrev()`\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8206\",\"dateCreated\":\"1583259900\",\"dateModified\":\"1583348835\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":6,\"lines.removed\":6,\"wasAcceptedBeforeClose\":true},\"branch\":null,\"summary\":\"Also not a repository operation.\",\"testPlan\":\"\",\"lineCount\":\"12\",\"activeDiffPHID\":\"PHID-DIFF-shmhfs2exdg7ituxbt22\",\"diffs\":[\"20442\",\"20422\"],\"commits\":[\"PHID-CMIT-66dzbf7lma7m2ri62tfl\",\"PHID-CMIT-2su6m35fsf32mblyi2ad\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-34jnztnonbr4lhwuybwl\":\"PHID-USER-34jnztnonbr4lhwuybwl\"},\"ccs\":[\"PHID-USER-5iy6mkoveguhm2zthvww\",\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-42xnmk3odcdz2lwckiym\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null},{\"id\":\"8205\",\"phid\":\"PHID-DREV-42xnmk3odcdz2lwckiym\",\"title\":\"phabricator: pass ui instead of repo to `readpatch()`\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8205\",\"dateCreated\":\"1583259897\",\"dateModified\":\"1583348832\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":5,\"lines.removed\":7,\"wasAcceptedBeforeClose\":true},\"branch\":null,\"summary\":\"This makes it a little clearer that it isn't a repository 
operation.\",\"testPlan\":\"\",\"lineCount\":\"12\",\"activeDiffPHID\":\"PHID-DIFF-atzhtzu6avavi6uevt3n\",\"diffs\":[\"20441\",\"20421\"],\"commits\":[\"PHID-CMIT-wtocju4a33qnh7jwy7on\",\"PHID-CMIT-e3dyltz277hhalnoum4m\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-34jnztnonbr4lhwuybwl\":\"PHID-USER-34jnztnonbr4lhwuybwl\"},\"ccs\":[\"PHID-USER-5iy6mkoveguhm2zthvww\",\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}"
                }, 
                "headers": {
                    "x-frame-options": [
                        "Deny"
                    ], 
                    "strict-transport-security": [
                        "max-age=0; includeSubdomains; preload"
                    ], 
                    "transfer-encoding": [
                        "chunked"
                    ], 
                    "cache-control": [
                        "no-store"
                    ], 
                    "referrer-policy": [
                        "no-referrer"
                    ], 
                    "x-content-type-options": [
                        "nosniff"
                    ], 
                    "x-xss-protection": [
                        "1; mode=block"
                    ], 
                    "server": [
                        "Apache/2.4.10 (Debian)"
                    ], 
                    "date": [
                        "Wed, 04 Mar 2020 22:05:21 GMT"
                    ], 
                    "content-type": [
                        "application/json"
                    ], 
                    "expires": [
                        "Sat, 01 Jan 2000 00:00:00 GMT"
                    ]
                }, 
                "status": {
                    "message": "OK", 
                    "code": 200
                }
            }, 
            "request": {
                "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B8205%2C+8206%2C+8207%5D%7D&output=json&__conduit__=1", 
                "uri": "https://phab.mercurial-scm.org//api/differential.query", 
                "headers": {
                    "content-length": [
                        "162"
                    ], 
                    "accept": [
                        "application/mercurial-0.1"
                    ], 
                    "host": [
                        "phab.mercurial-scm.org"
                    ], 
                    "user-agent": [
                        "mercurial/proto-1.0 (Mercurial 5.3+33-bb58931d0c4f)"
                    ], 
                    "content-type": [
                        "application/x-www-form-urlencoded"
                    ]
                }, 
                "method": "POST"
            }
        }, 
        {
            "response": {
                "body": {
                    "string": "{\"result\":{\"20443\":{\"id\":\"20443\",\"revisionID\":\"8207\",\"dateCreated\":\"1581964120\",\"dateModified\":\"1583327828\",\"sourceControlBaseRevision\":\"9b46270917348950e3fb1e73a5c9e46038065622\",\"sourceControlPath\":null,\"sourceControlSystem\":\"hg\",\"branch\":null,\"bookmark\":null,\"creationMethod\":\"commit\",\"description\":\"rHGa271ef1de08664a9ee4a286711681377875ca2a2\",\"unitStatus\":\"6\",\"lintStatus\":\"6\",\"changes\":[{\"id\":\"55598\",\"metadata\":{\"line:first\":1043,\"hash.effect\":\".HmDk8vnow9e\"},\"oldPath\":\"hgext\\/phabricator.py\",\"currentPath\":\"hgext\\/phabricator.py\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"7\",\"delLines\":\"4\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1799\",\"newLength\":\"1802\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\" # phabricator.py - simple Phabricator integration\\n #\\n # Copyright 2017 Facebook, Inc.\\n #\\n # This software may be used and distributed according to the terms of the\\n # GNU General Public License version 2 or any later version.\\n \\\"\\\"\\\"simple Phabricator integration (EXPERIMENTAL)\\n \\n This extension provides a ``phabsend`` command which sends a stack of\\n changesets to Phabricator, and a ``phabread`` command which prints a stack of\\n revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command\\n to update statuses in batch.\\n \\n A \\\"phabstatus\\\" view for :hg:`show` is also provided; it displays status\\n information of Phabricator differentials associated with unfinished\\n changesets.\\n \\n By default, Phabricator requires ``Test Plan`` which might prevent some\\n changeset from being sent. The requirement could be disabled by changing\\n ``differential.require-test-plan-field`` config server side.\\n \\n Config::\\n \\n     [phabricator]\\n     # Phabricator URL\\n     url = https:\\/\\/phab.example.com\\/\\n \\n     # Repo callsign. If a repo has a URL https:\\/\\/$HOST\\/diffusion\\/FOO, then its\\n     # callsign is \\\"FOO\\\".\\n     callsign = FOO\\n \\n     # curl command to use. If not set (default), use builtin HTTP library to\\n     # communicate. If set, use the specified curl command. This could be useful\\n     # if you need to specify advanced options that is not easily supported by\\n     # the internal library.\\n     curlcmd = curl --connect-timeout 2 --retry 3 --silent\\n \\n     [auth]\\n     example.schemes = https\\n     example.prefix = phab.example.com\\n \\n     # API token. 
Get it from https:\\/\\/$HOST\\/conduit\\/login\\/\\n     example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx\\n \\\"\\\"\\\"\\n \\n from __future__ import absolute_import\\n \\n import base64\\n import contextlib\\n import hashlib\\n import itertools\\n import json\\n import mimetypes\\n import operator\\n import re\\n \\n from mercurial.node import bin, nullid\\n from mercurial.i18n import _\\n from mercurial.pycompat import getattr\\n from mercurial.thirdparty import attr\\n from mercurial import (\\n     cmdutil,\\n     context,\\n     encoding,\\n     error,\\n     exthelper,\\n     graphmod,\\n     httpconnection as httpconnectionmod,\\n     localrepo,\\n     logcmdutil,\\n     match,\\n     mdiff,\\n     obsutil,\\n     parser,\\n     patch,\\n     phases,\\n     pycompat,\\n     scmutil,\\n     smartset,\\n     tags,\\n     templatefilters,\\n     templateutil,\\n     url as urlmod,\\n     util,\\n )\\n from mercurial.utils import (\\n     procutil,\\n     stringutil,\\n )\\n from . import show\\n \\n \\n # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for\\n # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should\\n # be specifying the version(s) of Mercurial they are tested with, or\\n # leave the attribute unspecified.\\n testedwith = b'ships-with-hg-core'\\n \\n eh = exthelper.exthelper()\\n \\n cmdtable = eh.cmdtable\\n command = eh.command\\n configtable = eh.configtable\\n templatekeyword = eh.templatekeyword\\n uisetup = eh.finaluisetup\\n \\n # developer config: phabricator.batchsize\\n eh.configitem(\\n     b'phabricator', b'batchsize', default=12,\\n )\\n eh.configitem(\\n     b'phabricator', b'callsign', default=None,\\n )\\n eh.configitem(\\n     b'phabricator', b'curlcmd', default=None,\\n )\\n # developer config: phabricator.repophid\\n eh.configitem(\\n     b'phabricator', b'repophid', default=None,\\n )\\n eh.configitem(\\n     b'phabricator', b'url', default=None,\\n )\\n eh.configitem(\\n     b'phabsend', b'confirm', default=False,\\n )\\n \\n colortable = {\\n     b'phabricator.action.created': b'green',\\n     b'phabricator.action.skipped': b'magenta',\\n     b'phabricator.action.updated': b'magenta',\\n     b'phabricator.desc': b'',\\n     b'phabricator.drev': b'bold',\\n     b'phabricator.node': b'',\\n     b'phabricator.status.abandoned': b'magenta dim',\\n     b'phabricator.status.accepted': b'green bold',\\n     b'phabricator.status.closed': b'green',\\n     b'phabricator.status.needsreview': b'yellow',\\n     b'phabricator.status.needsrevision': b'red',\\n     b'phabricator.status.changesplanned': b'red',\\n }\\n \\n _VCR_FLAGS = [\\n     (\\n         b'',\\n         b'test-vcr',\\n         b'',\\n         _(\\n             b'Path to a vcr file. 
If nonexistent, will record a new vcr transcript'\\n             b', otherwise will mock all http requests using the specified vcr file.'\\n             b' (ADVANCED)'\\n         ),\\n     ),\\n ]\\n \\n \\n @eh.wrapfunction(localrepo, \\\"loadhgrc\\\")\\n def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):\\n     \\\"\\\"\\\"Load ``.arcconfig`` content into a ui instance on repository open.\\n     \\\"\\\"\\\"\\n     result = False\\n     arcconfig = {}\\n \\n     try:\\n         # json.loads only accepts bytes from 3.6+\\n         rawparams = encoding.unifromlocal(wdirvfs.read(b\\\".arcconfig\\\"))\\n         # json.loads only returns unicode strings\\n         arcconfig = pycompat.rapply(\\n             lambda x: encoding.unitolocal(x)\\n             if isinstance(x, pycompat.unicode)\\n             else x,\\n             pycompat.json_loads(rawparams),\\n         )\\n \\n         result = True\\n     except ValueError:\\n         ui.warn(_(b\\\"invalid JSON in %s\\\\n\\\") % wdirvfs.join(b\\\".arcconfig\\\"))\\n     except IOError:\\n         pass\\n \\n     cfg = util.sortdict()\\n \\n     if b\\\"repository.callsign\\\" in arcconfig:\\n         cfg[(b\\\"phabricator\\\", b\\\"callsign\\\")] = arcconfig[b\\\"repository.callsign\\\"]\\n \\n     if b\\\"phabricator.uri\\\" in arcconfig:\\n         cfg[(b\\\"phabricator\\\", b\\\"url\\\")] = arcconfig[b\\\"phabricator.uri\\\"]\\n \\n     if cfg:\\n         ui.applyconfig(cfg, source=wdirvfs.join(b\\\".arcconfig\\\"))\\n \\n     return orig(ui, wdirvfs, hgvfs, requirements) or result  # Load .hg\\/hgrc\\n \\n \\n def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):\\n     fullflags = flags + _VCR_FLAGS\\n \\n     def hgmatcher(r1, r2):\\n         if r1.uri != r2.uri or r1.method != r2.method:\\n             return False\\n         r1params = util.urlreq.parseqs(r1.body)\\n         r2params = util.urlreq.parseqs(r2.body)\\n         for key in r1params:\\n             if key not in r2params:\\n                 return False\\n             value = r1params[key][0]\\n             # we want to compare json payloads without worrying about ordering\\n             if value.startswith(b'{') and value.endswith(b'}'):\\n                 r1json = pycompat.json_loads(value)\\n                 r2json = pycompat.json_loads(r2params[key][0])\\n                 if r1json != r2json:\\n                     return False\\n             elif r2params[key][0] != value:\\n                 return False\\n         return True\\n \\n     def sanitiserequest(request):\\n         request.body = re.sub(\\n             br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body\\n         )\\n         return request\\n \\n     def sanitiseresponse(response):\\n         if 'set-cookie' in response['headers']:\\n             del response['headers']['set-cookie']\\n         return response\\n \\n     def decorate(fn):\\n         def inner(*args, **kwargs):\\n             cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))\\n             if cassette:\\n                 import hgdemandimport\\n \\n                 with hgdemandimport.deactivated():\\n                     import vcr as vcrmod\\n                     import vcr.stubs as stubs\\n \\n                     vcr = vcrmod.VCR(\\n                         serializer='json',\\n                         before_record_request=sanitiserequest,\\n                         before_record_response=sanitiseresponse,\\n                         custom_patches=[\\n                             (\\n              
                   urlmod,\\n                                 'httpconnection',\\n                                 stubs.VCRHTTPConnection,\\n                             ),\\n                             (\\n                                 urlmod,\\n                                 'httpsconnection',\\n                                 stubs.VCRHTTPSConnection,\\n                             ),\\n                         ],\\n                     )\\n                     vcr.register_matcher('hgmatcher', hgmatcher)\\n                     with vcr.use_cassette(cassette, match_on=['hgmatcher']):\\n                         return fn(*args, **kwargs)\\n             return fn(*args, **kwargs)\\n \\n         inner.__name__ = fn.__name__\\n         inner.__doc__ = fn.__doc__\\n         return command(\\n             name,\\n             fullflags,\\n             spec,\\n             helpcategory=helpcategory,\\n             optionalrepo=optionalrepo,\\n         )(inner)\\n \\n     return decorate\\n \\n \\n def urlencodenested(params):\\n     \\\"\\\"\\\"like urlencode, but works with nested parameters.\\n \\n     For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be\\n     flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to\\n     urlencode. Note: the encoding is consistent with PHP's http_build_query.\\n     \\\"\\\"\\\"\\n     flatparams = util.sortdict()\\n \\n     def process(prefix, obj):\\n         if isinstance(obj, bool):\\n             obj = {True: b'true', False: b'false'}[obj]  # Python -\\u003e PHP form\\n         lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]\\n         items = {list: lister, dict: lambda x: x.items()}.get(type(obj))\\n         if items is None:\\n             flatparams[prefix] = obj\\n         else:\\n             for k, v in items(obj):\\n                 if prefix:\\n                     process(b'%s[%s]' % (prefix, k), v)\\n                 else:\\n                     process(k, v)\\n \\n     process(b'', params)\\n     return util.urlreq.urlencode(flatparams)\\n \\n \\n def readurltoken(ui):\\n     \\\"\\\"\\\"return conduit url, token and make sure they exist\\n \\n     Currently read from [auth] config section. In the future, it might\\n     make sense to read from .arcconfig and .arcrc as well.\\n     \\\"\\\"\\\"\\n     url = ui.config(b'phabricator', b'url')\\n     if not url:\\n         raise error.Abort(\\n             _(b'config %s.%s is required') % (b'phabricator', b'url')\\n         )\\n \\n     res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)\\n     token = None\\n \\n     if res:\\n         group, auth = res\\n \\n         ui.debug(b\\\"using auth.%s.* for authentication\\\\n\\\" % group)\\n \\n         token = auth.get(b'phabtoken')\\n \\n     if not token:\\n         raise error.Abort(\\n             _(b'Can\\\\'t find conduit token associated to %s') % (url,)\\n         )\\n \\n     return url, token\\n \\n \\n def callconduit(ui, name, params):\\n     \\\"\\\"\\\"call Conduit API, params is a dict. 
return json.loads result, or None\\\"\\\"\\\"\\n     host, token = readurltoken(ui)\\n     url, authinfo = util.url(b'\\/'.join([host, b'api', name])).authinfo()\\n     ui.debug(b'Conduit Call: %s %s\\\\n' % (url, pycompat.byterepr(params)))\\n     params = params.copy()\\n     params[b'__conduit__'] = {\\n         b'token': token,\\n     }\\n     rawdata = {\\n         b'params': templatefilters.json(params),\\n         b'output': b'json',\\n         b'__conduit__': 1,\\n     }\\n     data = urlencodenested(rawdata)\\n     curlcmd = ui.config(b'phabricator', b'curlcmd')\\n     if curlcmd:\\n         sin, sout = procutil.popen2(\\n             b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))\\n         )\\n         sin.write(data)\\n         sin.close()\\n         body = sout.read()\\n     else:\\n         urlopener = urlmod.opener(ui, authinfo)\\n         request = util.urlreq.request(pycompat.strurl(url), data=data)\\n         with contextlib.closing(urlopener.open(request)) as rsp:\\n             body = rsp.read()\\n     ui.debug(b'Conduit Response: %s\\\\n' % body)\\n     parsed = pycompat.rapply(\\n         lambda x: encoding.unitolocal(x)\\n         if isinstance(x, pycompat.unicode)\\n         else x,\\n         # json.loads only accepts bytes from py3.6+\\n         pycompat.json_loads(encoding.unifromlocal(body)),\\n     )\\n     if parsed.get(b'error_code'):\\n         msg = _(b'Conduit Error (%s): %s') % (\\n             parsed[b'error_code'],\\n             parsed[b'error_info'],\\n         )\\n         raise error.Abort(msg)\\n     return parsed[b'result']\\n \\n \\n @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)\\n def debugcallconduit(ui, repo, name):\\n     \\\"\\\"\\\"call Conduit API\\n \\n     Call parameters are read from stdin as a JSON blob. 
Result will be written\\n     to stdout as a JSON blob.\\n     \\\"\\\"\\\"\\n     # json.loads only accepts bytes from 3.6+\\n     rawparams = encoding.unifromlocal(ui.fin.read())\\n     # json.loads only returns unicode strings\\n     params = pycompat.rapply(\\n         lambda x: encoding.unitolocal(x)\\n         if isinstance(x, pycompat.unicode)\\n         else x,\\n         pycompat.json_loads(rawparams),\\n     )\\n     # json.dumps only accepts unicode strings\\n     result = pycompat.rapply(\\n         lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,\\n         callconduit(ui, name, params),\\n     )\\n     s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))\\n     ui.write(b'%s\\\\n' % encoding.unitolocal(s))\\n \\n \\n def getrepophid(repo):\\n     \\\"\\\"\\\"given callsign, return repository PHID or None\\\"\\\"\\\"\\n     # developer config: phabricator.repophid\\n     repophid = repo.ui.config(b'phabricator', b'repophid')\\n     if repophid:\\n         return repophid\\n     callsign = repo.ui.config(b'phabricator', b'callsign')\\n     if not callsign:\\n         return None\\n     query = callconduit(\\n         repo.ui,\\n         b'diffusion.repository.search',\\n         {b'constraints': {b'callsigns': [callsign]}},\\n     )\\n     if len(query[b'data']) == 0:\\n         return None\\n     repophid = query[b'data'][0][b'phid']\\n     repo.ui.setconfig(b'phabricator', b'repophid', repophid)\\n     return repophid\\n \\n \\n _differentialrevisiontagre = re.compile(br'\\\\AD([1-9][0-9]*)\\\\Z')\\n _differentialrevisiondescre = re.compile(\\n     br'^Differential Revision:\\\\s*(?P\\u003curl\\u003e(?:.*)D(?P\\u003cid\\u003e[1-9][0-9]*))$', re.M\\n )\\n \\n \\n def getoldnodedrevmap(repo, nodelist):\\n     \\\"\\\"\\\"find previous nodes that has been sent to Phabricator\\n \\n     return {node: (oldnode, Differential diff, Differential Revision ID)}\\n     for node in nodelist with known previous sent versions, or associated\\n     Differential Revision IDs. ``oldnode`` and ``Differential diff`` could\\n     be ``None``.\\n \\n     Examines commit messages like \\\"Differential Revision:\\\" to get the\\n     association information.\\n \\n     If such commit message line is not found, examines all precursors and their\\n     tags. Tags with format like \\\"D1234\\\" are considered a match and the node\\n     with that tag, and the number after \\\"D\\\" (ex. 
1234) will be returned.\\n \\n     The ``old node``, if not None, is guaranteed to be the last diff of\\n     corresponding Differential Revision, and exist in the repo.\\n     \\\"\\\"\\\"\\n     unfi = repo.unfiltered()\\n     has_node = unfi.changelog.index.has_node\\n \\n     result = {}  # {node: (oldnode?, lastdiff?, drev)}\\n     toconfirm = {}  # {node: (force, {precnode}, drev)}\\n     for node in nodelist:\\n         ctx = unfi[node]\\n         # For tags like \\\"D123\\\", put them into \\\"toconfirm\\\" to verify later\\n         precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))\\n         for n in precnodes:\\n             if has_node(n):\\n                 for tag in unfi.nodetags(n):\\n                     m = _differentialrevisiontagre.match(tag)\\n                     if m:\\n                         toconfirm[node] = (0, set(precnodes), int(m.group(1)))\\n                         break\\n                 else:\\n                     continue  # move to next predecessor\\n                 break  # found a tag, stop\\n         else:\\n             # Check commit message\\n             m = _differentialrevisiondescre.search(ctx.description())\\n             if m:\\n                 toconfirm[node] = (1, set(precnodes), int(m.group('id')))\\n \\n     # Double check if tags are genuine by collecting all old nodes from\\n     # Phabricator, and expect precursors overlap with it.\\n     if toconfirm:\\n         drevs = [drev for force, precs, drev in toconfirm.values()]\\n         alldiffs = callconduit(\\n             unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}\\n         )\\n         getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None\\n         for newnode, (force, precset, drev) in toconfirm.items():\\n             diffs = [\\n                 d for d in alldiffs.values() if int(d[b'revisionID']) == drev\\n             ]\\n \\n             # \\\"precursors\\\" as known by Phabricator\\n             phprecset = set(getnode(d) for d in diffs)\\n \\n             # Ignore if precursors (Phabricator and local repo) do not overlap,\\n             # and force is not set (when commit message says nothing)\\n             if not force and not bool(phprecset & precset):\\n                 tagname = b'D%d' % drev\\n                 tags.tag(\\n                     repo,\\n                     tagname,\\n                     nullid,\\n                     message=None,\\n                     user=None,\\n                     date=None,\\n                     local=True,\\n                 )\\n                 unfi.ui.warn(\\n                     _(\\n                         b'D%d: local tag removed - does not match '\\n                         b'Differential history\\\\n'\\n                     )\\n                     % drev\\n                 )\\n                 continue\\n \\n             # Find the last node using Phabricator metadata, and make sure it\\n             # exists in the repo\\n             oldnode = lastdiff = None\\n             if diffs:\\n                 lastdiff = max(diffs, key=lambda d: int(d[b'id']))\\n                 oldnode = getnode(lastdiff)\\n                 if oldnode and not has_node(oldnode):\\n                     oldnode = None\\n \\n             result[newnode] = (oldnode, lastdiff, drev)\\n \\n     return result\\n \\n \\n def getdrevmap(repo, revs):\\n     \\\"\\\"\\\"Return a dict mapping each rev in `revs` to their Differential Revision\\n     ID or None.\\n     \\\"\\\"\\\"\\n     result = {}\\n    
 for rev in revs:\\n         result[rev] = None\\n         ctx = repo[rev]\\n         # Check commit message\\n         m = _differentialrevisiondescre.search(ctx.description())\\n         if m:\\n             result[rev] = int(m.group('id'))\\n             continue\\n         # Check tags\\n         for tag in repo.nodetags(ctx.node()):\\n             m = _differentialrevisiontagre.match(tag)\\n             if m:\\n                 result[rev] = int(m.group(1))\\n                 break\\n \\n     return result\\n \\n \\n def getdiff(ctx, diffopts):\\n     \\\"\\\"\\\"plain-text diff without header (user, commit message, etc)\\\"\\\"\\\"\\n     output = util.stringio()\\n     for chunk, _label in patch.diffui(\\n         ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts\\n     ):\\n         output.write(chunk)\\n     return output.getvalue()\\n \\n \\n class DiffChangeType(object):\\n     ADD = 1\\n     CHANGE = 2\\n     DELETE = 3\\n     MOVE_AWAY = 4\\n     COPY_AWAY = 5\\n     MOVE_HERE = 6\\n     COPY_HERE = 7\\n     MULTICOPY = 8\\n \\n \\n class DiffFileType(object):\\n     TEXT = 1\\n     IMAGE = 2\\n     BINARY = 3\\n \\n \\n @attr.s\\n class phabhunk(dict):\\n     \\\"\\\"\\\"Represents a Differential hunk, which is owned by a Differential change\\n     \\\"\\\"\\\"\\n \\n     oldOffset = attr.ib(default=0)  # camelcase-required\\n     oldLength = attr.ib(default=0)  # camelcase-required\\n     newOffset = attr.ib(default=0)  # camelcase-required\\n     newLength = attr.ib(default=0)  # camelcase-required\\n     corpus = attr.ib(default='')\\n     # These get added to the phabchange's equivalents\\n     addLines = attr.ib(default=0)  # camelcase-required\\n     delLines = attr.ib(default=0)  # camelcase-required\\n \\n \\n @attr.s\\n class phabchange(object):\\n     \\\"\\\"\\\"Represents a Differential change, owns Differential hunks and owned by a\\n     Differential diff.  Each one represents one file in a diff.\\n     \\\"\\\"\\\"\\n \\n     currentPath = attr.ib(default=None)  # camelcase-required\\n     oldPath = attr.ib(default=None)  # camelcase-required\\n     awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required\\n     metadata = attr.ib(default=attr.Factory(dict))\\n     oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required\\n     newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required\\n     type = attr.ib(default=DiffChangeType.CHANGE)\\n     fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required\\n     commitHash = attr.ib(default=None)  # camelcase-required\\n     addLines = attr.ib(default=0)  # camelcase-required\\n     delLines = attr.ib(default=0)  # camelcase-required\\n     hunks = attr.ib(default=attr.Factory(list))\\n \\n     def copynewmetadatatoold(self):\\n         for key in list(self.metadata.keys()):\\n             newkey = key.replace(b'new:', b'old:')\\n             self.metadata[newkey] = self.metadata[key]\\n \\n     def addoldmode(self, value):\\n         self.oldProperties[b'unix:filemode'] = value\\n \\n     def addnewmode(self, value):\\n         self.newProperties[b'unix:filemode'] = value\\n \\n     def addhunk(self, hunk):\\n         if not isinstance(hunk, phabhunk):\\n             raise error.Abort(b'phabchange.addhunk only takes phabhunks')\\n         self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))\\n         # It's useful to include these stats since the Phab web UI shows them,\\n         # and uses them to estimate how large a change a Revision is. 
Also used\\n         # in email subjects for the [+++--] bit.\\n         self.addLines += hunk.addLines\\n         self.delLines += hunk.delLines\\n \\n \\n @attr.s\\n class phabdiff(object):\\n     \\\"\\\"\\\"Represents a Differential diff, owns Differential changes.  Corresponds\\n     to a commit.\\n     \\\"\\\"\\\"\\n \\n     # Doesn't seem to be any reason to send this (output of uname -n)\\n     sourceMachine = attr.ib(default=b'')  # camelcase-required\\n     sourcePath = attr.ib(default=b'\\/')  # camelcase-required\\n     sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required\\n     sourceControlPath = attr.ib(default=b'\\/')  # camelcase-required\\n     sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required\\n     branch = attr.ib(default=b'default')\\n     bookmark = attr.ib(default=None)\\n     creationMethod = attr.ib(default=b'phabsend')  # camelcase-required\\n     lintStatus = attr.ib(default=b'none')  # camelcase-required\\n     unitStatus = attr.ib(default=b'none')  # camelcase-required\\n     changes = attr.ib(default=attr.Factory(dict))\\n     repositoryPHID = attr.ib(default=None)  # camelcase-required\\n \\n     def addchange(self, change):\\n         if not isinstance(change, phabchange):\\n             raise error.Abort(b'phabdiff.addchange only takes phabchanges')\\n         self.changes[change.currentPath] = pycompat.byteskwargs(\\n             attr.asdict(change)\\n         )\\n \\n \\n def maketext(pchange, ctx, fname):\\n     \\\"\\\"\\\"populate the phabchange for a text file\\\"\\\"\\\"\\n     repo = ctx.repo()\\n     fmatcher = match.exact([fname])\\n     diffopts = mdiff.diffopts(git=True, context=32767)\\n     _pfctx, _fctx, header, fhunks = next(\\n         patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)\\n     )\\n \\n     for fhunk in fhunks:\\n         (oldOffset, oldLength, newOffset, newLength), lines = fhunk\\n         corpus = b''.join(lines[1:])\\n         shunk = list(header)\\n         shunk.extend(lines)\\n         _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(\\n             patch.diffstatdata(util.iterlines(shunk))\\n         )\\n         pchange.addhunk(\\n             phabhunk(\\n                 oldOffset,\\n                 oldLength,\\n                 newOffset,\\n                 newLength,\\n                 corpus,\\n                 addLines,\\n                 delLines,\\n             )\\n         )\\n \\n \\n def uploadchunks(fctx, fphid):\\n     \\\"\\\"\\\"upload large binary files as separate chunks.\\n     Phab requests chunking over 8MiB, and splits into 4MiB chunks\\n     \\\"\\\"\\\"\\n     ui = fctx.repo().ui\\n     chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})\\n     with ui.makeprogress(\\n         _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)\\n     ) as progress:\\n         for chunk in chunks:\\n             progress.increment()\\n             if chunk[b'complete']:\\n                 continue\\n             bstart = int(chunk[b'byteStart'])\\n             bend = int(chunk[b'byteEnd'])\\n             callconduit(\\n                 ui,\\n                 b'file.uploadchunk',\\n                 {\\n                     b'filePHID': fphid,\\n                     b'byteStart': bstart,\\n                     b'data': base64.b64encode(fctx.data()[bstart:bend]),\\n                     b'dataEncoding': b'base64',\\n                 },\\n             )\\n \\n \\n def uploadfile(fctx):\\n     \\\"\\\"\\\"upload binary files to 
Phabricator\\\"\\\"\\\"\\n     repo = fctx.repo()\\n     ui = repo.ui\\n     fname = fctx.path()\\n     size = fctx.size()\\n     fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())\\n \\n     # an allocate call is required first to see if an upload is even required\\n     # (Phab might already have it) and to determine if chunking is needed\\n     allocateparams = {\\n         b'name': fname,\\n         b'contentLength': size,\\n         b'contentHash': fhash,\\n     }\\n     filealloc = callconduit(ui, b'file.allocate', allocateparams)\\n     fphid = filealloc[b'filePHID']\\n \\n     if filealloc[b'upload']:\\n         ui.write(_(b'uploading %s\\\\n') % bytes(fctx))\\n         if not fphid:\\n             uploadparams = {\\n                 b'name': fname,\\n                 b'data_base64': base64.b64encode(fctx.data()),\\n             }\\n             fphid = callconduit(ui, b'file.upload', uploadparams)\\n         else:\\n             uploadchunks(fctx, fphid)\\n     else:\\n         ui.debug(b'server already has %s\\\\n' % bytes(fctx))\\n \\n     if not fphid:\\n         raise error.Abort(b'Upload of %s failed.' % bytes(fctx))\\n \\n     return fphid\\n \\n \\n def addoldbinary(pchange, fctx):\\n     \\\"\\\"\\\"add the metadata for the previous version of a binary file to the\\n     phabchange for the new version\\n     \\\"\\\"\\\"\\n     oldfctx = fctx.p1()\\n     if fctx.cmp(oldfctx):\\n         # Files differ, add the old one\\n         pchange.metadata[b'old:file:size'] = oldfctx.size()\\n         mimeguess, _enc = mimetypes.guess_type(\\n             encoding.unifromlocal(oldfctx.path())\\n         )\\n         if mimeguess:\\n             pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(\\n                 mimeguess\\n             )\\n         fphid = uploadfile(oldfctx)\\n         pchange.metadata[b'old:binary-phid'] = fphid\\n     else:\\n         # If it's left as IMAGE\\/BINARY web UI might try to display it\\n         pchange.fileType = DiffFileType.TEXT\\n         pchange.copynewmetadatatoold()\\n \\n \\n def makebinary(pchange, fctx):\\n     \\\"\\\"\\\"populate the phabchange for a binary file\\\"\\\"\\\"\\n     pchange.fileType = DiffFileType.BINARY\\n     fphid = uploadfile(fctx)\\n     pchange.metadata[b'new:binary-phid'] = fphid\\n     pchange.metadata[b'new:file:size'] = fctx.size()\\n     mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))\\n     if mimeguess:\\n         mimeguess = pycompat.bytestr(mimeguess)\\n         pchange.metadata[b'new:file:mime-type'] = mimeguess\\n         if mimeguess.startswith(b'image\\/'):\\n             pchange.fileType = DiffFileType.IMAGE\\n \\n \\n # Copied from mercurial\\/patch.py\\n gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}\\n \\n \\n def notutf8(fctx):\\n     \\\"\\\"\\\"detect non-UTF-8 text files since Phabricator requires them to be marked\\n     as binary\\n     \\\"\\\"\\\"\\n     try:\\n         fctx.data().decode('utf-8')\\n         if fctx.parents():\\n             fctx.p1().data().decode('utf-8')\\n         return False\\n     except UnicodeDecodeError:\\n         fctx.repo().ui.write(\\n             _(b'file %s detected as non-UTF-8, marked as binary\\\\n')\\n             % fctx.path()\\n         )\\n         return True\\n \\n \\n def addremoved(pdiff, ctx, removed):\\n     \\\"\\\"\\\"add removed files to the phabdiff. 
Shouldn't include moves\\\"\\\"\\\"\\n     for fname in removed:\\n         pchange = phabchange(\\n             currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE\\n         )\\n         pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])\\n         fctx = ctx.p1()[fname]\\n         if not (fctx.isbinary() or notutf8(fctx)):\\n             maketext(pchange, ctx, fname)\\n \\n         pdiff.addchange(pchange)\\n \\n \\n def addmodified(pdiff, ctx, modified):\\n     \\\"\\\"\\\"add modified files to the phabdiff\\\"\\\"\\\"\\n     for fname in modified:\\n         fctx = ctx[fname]\\n         pchange = phabchange(currentPath=fname, oldPath=fname)\\n         filemode = gitmode[ctx[fname].flags()]\\n         originalmode = gitmode[ctx.p1()[fname].flags()]\\n         if filemode != originalmode:\\n             pchange.addoldmode(originalmode)\\n             pchange.addnewmode(filemode)\\n \\n         if fctx.isbinary() or notutf8(fctx):\\n             makebinary(pchange, fctx)\\n             addoldbinary(pchange, fctx)\\n         else:\\n             maketext(pchange, ctx, fname)\\n \\n         pdiff.addchange(pchange)\\n \\n \\n def addadded(pdiff, ctx, added, removed):\\n     \\\"\\\"\\\"add file adds to the phabdiff, both new files and copies\\/moves\\\"\\\"\\\"\\n     # Keep track of files that've been recorded as moved\\/copied, so if there are\\n     # additional copies we can mark them (moves get removed from removed)\\n     copiedchanges = {}\\n     movedchanges = {}\\n     for fname in added:\\n         fctx = ctx[fname]\\n         pchange = phabchange(currentPath=fname)\\n \\n         filemode = gitmode[ctx[fname].flags()]\\n         renamed = fctx.renamed()\\n \\n         if renamed:\\n             originalfname = renamed[0]\\n             originalmode = gitmode[ctx.p1()[originalfname].flags()]\\n             pchange.oldPath = originalfname\\n \\n             if originalfname in removed:\\n                 origpchange = phabchange(\\n                     currentPath=originalfname,\\n                     oldPath=originalfname,\\n                     type=DiffChangeType.MOVE_AWAY,\\n                     awayPaths=[fname],\\n                 )\\n                 movedchanges[originalfname] = origpchange\\n                 removed.remove(originalfname)\\n                 pchange.type = DiffChangeType.MOVE_HERE\\n             elif originalfname in movedchanges:\\n                 movedchanges[originalfname].type = DiffChangeType.MULTICOPY\\n                 movedchanges[originalfname].awayPaths.append(fname)\\n                 pchange.type = DiffChangeType.COPY_HERE\\n             else:  # pure copy\\n                 if originalfname not in copiedchanges:\\n                     origpchange = phabchange(\\n                         currentPath=originalfname, type=DiffChangeType.COPY_AWAY\\n                     )\\n                     copiedchanges[originalfname] = origpchange\\n                 else:\\n                     origpchange = copiedchanges[originalfname]\\n                 origpchange.awayPaths.append(fname)\\n                 pchange.type = DiffChangeType.COPY_HERE\\n \\n             if filemode != originalmode:\\n                 pchange.addoldmode(originalmode)\\n                 pchange.addnewmode(filemode)\\n         else:  # Brand-new file\\n             pchange.addnewmode(gitmode[fctx.flags()])\\n             pchange.type = DiffChangeType.ADD\\n \\n         if fctx.isbinary() or notutf8(fctx):\\n             makebinary(pchange, fctx)\\n             if 
renamed:\\n                 addoldbinary(pchange, fctx)\\n         else:\\n             maketext(pchange, ctx, fname)\\n \\n         pdiff.addchange(pchange)\\n \\n     for _path, copiedchange in copiedchanges.items():\\n         pdiff.addchange(copiedchange)\\n     for _path, movedchange in movedchanges.items():\\n         pdiff.addchange(movedchange)\\n \\n \\n def creatediff(ctx):\\n     \\\"\\\"\\\"create a Differential Diff\\\"\\\"\\\"\\n     repo = ctx.repo()\\n     repophid = getrepophid(repo)\\n     # Create a \\\"Differential Diff\\\" via \\\"differential.creatediff\\\" API\\n     pdiff = phabdiff(\\n         sourceControlBaseRevision=b'%s' % ctx.p1().hex(),\\n         branch=b'%s' % ctx.branch(),\\n     )\\n     modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)\\n     # addadded will remove moved files from removed, so addremoved won't get\\n     # them\\n     addadded(pdiff, ctx, added, removed)\\n     addmodified(pdiff, ctx, modified)\\n     addremoved(pdiff, ctx, removed)\\n     if repophid:\\n         pdiff.repositoryPHID = repophid\\n     diff = callconduit(\\n         repo.ui,\\n         b'differential.creatediff',\\n         pycompat.byteskwargs(attr.asdict(pdiff)),\\n     )\\n     if not diff:\\n         raise error.Abort(_(b'cannot create diff for %s') % ctx)\\n     return diff\\n \\n \\n def writediffproperties(ctx, diff):\\n     \\\"\\\"\\\"write metadata to diff so patches could be applied losslessly\\\"\\\"\\\"\\n     # creatediff returns with a diffid but query returns with an id\\n     diffid = diff.get(b'diffid', diff.get(b'id'))\\n     params = {\\n         b'diff_id': diffid,\\n         b'name': b'hg:meta',\\n         b'data': templatefilters.json(\\n             {\\n                 b'user': ctx.user(),\\n                 b'date': b'%d %d' % ctx.date(),\\n                 b'branch': ctx.branch(),\\n                 b'node': ctx.hex(),\\n                 b'parent': ctx.p1().hex(),\\n             }\\n         ),\\n     }\\n     callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)\\n \\n     params = {\\n         b'diff_id': diffid,\\n         b'name': b'local:commits',\\n         b'data': templatefilters.json(\\n             {\\n                 ctx.hex(): {\\n                     b'author': stringutil.person(ctx.user()),\\n                     b'authorEmail': stringutil.email(ctx.user()),\\n                     b'time': int(ctx.date()[0]),\\n                     b'commit': ctx.hex(),\\n                     b'parents': [ctx.p1().hex()],\\n                     b'branch': ctx.branch(),\\n                 },\\n             }\\n         ),\\n     }\\n     callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)\\n \\n \\n def createdifferentialrevision(\\n     ctx,\\n     revid=None,\\n     parentrevphid=None,\\n     oldnode=None,\\n     olddiff=None,\\n     actions=None,\\n     comment=None,\\n ):\\n     \\\"\\\"\\\"create or update a Differential Revision\\n \\n     If revid is None, create a new Differential Revision, otherwise update\\n     revid. 
If parentrevphid is not None, set it as a dependency.\\n \\n     If oldnode is not None, check if the patch content (without commit message\\n     and metadata) has changed before creating another diff.\\n \\n     If actions is not None, they will be appended to the transaction.\\n     \\\"\\\"\\\"\\n     repo = ctx.repo()\\n     if oldnode:\\n         diffopts = mdiff.diffopts(git=True, context=32767)\\n         oldctx = repo.unfiltered()[oldnode]\\n         neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)\\n     else:\\n         neednewdiff = True\\n \\n     transactions = []\\n     if neednewdiff:\\n         diff = creatediff(ctx)\\n         transactions.append({b'type': b'update', b'value': diff[b'phid']})\\n         if comment:\\n             transactions.append({b'type': b'comment', b'value': comment})\\n     else:\\n         # Even if we don't need to upload a new diff because the patch content\\n         # does not change. We might still need to update its metadata so\\n         # pushers could know the correct node metadata.\\n         assert olddiff\\n         diff = olddiff\\n     writediffproperties(ctx, diff)\\n \\n     # Set the parent Revision every time, so commit re-ordering is picked-up\\n     if parentrevphid:\\n         transactions.append(\\n             {b'type': b'parents.set', b'value': [parentrevphid]}\\n         )\\n \\n     if actions:\\n         transactions += actions\\n \\n     # Parse commit message and update related fields.\\n     desc = ctx.description()\\n     info = callconduit(\\n         repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}\\n     )\\n     for k, v in info[b'fields'].items():\\n         if k in [b'title', b'summary', b'testPlan']:\\n             transactions.append({b'type': k, b'value': v})\\n \\n     params = {b'transactions': transactions}\\n     if revid is not None:\\n         # Update an existing Differential Revision\\n         params[b'objectIdentifier'] = revid\\n \\n     revision = callconduit(repo.ui, b'differential.revision.edit', params)\\n     if not revision:\\n         raise error.Abort(_(b'cannot create revision for %s') % ctx)\\n \\n     return revision, diff\\n \\n \\n-def userphids(repo, names):\\n+def userphids(ui, names):\\n     \\\"\\\"\\\"convert user names to PHIDs\\\"\\\"\\\"\\n     names = [name.lower() for name in names]\\n     query = {b'constraints': {b'usernames': names}}\\n-    result = callconduit(repo.ui, b'user.search', query)\\n+    result = callconduit(ui, b'user.search', query)\\n     # username not found is not an error of the API. 
So check if we have missed\\n     # some names here.\\n     data = result[b'data']\\n     resolved = set(entry[b'fields'][b'username'].lower() for entry in data)\\n     unresolved = set(names) - resolved\\n     if unresolved:\\n         raise error.Abort(\\n             _(b'unknown username: %s') % b' '.join(sorted(unresolved))\\n         )\\n     return [entry[b'phid'] for entry in data]\\n \\n \\n @vcrcommand(\\n     b'phabsend',\\n     [\\n         (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),\\n         (b'', b'amend', True, _(b'update commit messages')),\\n         (b'', b'reviewer', [], _(b'specify reviewers')),\\n         (b'', b'blocker', [], _(b'specify blocking reviewers')),\\n         (\\n             b'm',\\n             b'comment',\\n             b'',\\n             _(b'add a comment to Revisions with new\\/updated Diffs'),\\n         ),\\n         (b'', b'confirm', None, _(b'ask for confirmation before sending')),\\n     ],\\n     _(b'REV [OPTIONS]'),\\n     helpcategory=command.CATEGORY_IMPORT_EXPORT,\\n )\\n def phabsend(ui, repo, *revs, **opts):\\n     \\\"\\\"\\\"upload changesets to Phabricator\\n \\n     If there are multiple revisions specified, they will be send as a stack\\n     with a linear dependencies relationship using the order specified by the\\n     revset.\\n \\n     For the first time uploading changesets, local tags will be created to\\n     maintain the association. After the first time, phabsend will check\\n     obsstore and tags information so it can figure out whether to update an\\n     existing Differential Revision, or create a new one.\\n \\n     If --amend is set, update commit messages so they have the\\n     ``Differential Revision`` URL, remove related tags. This is similar to what\\n     arcanist will do, and is more desired in author-push workflows. Otherwise,\\n     use local tags to record the ``Differential Revision`` association.\\n \\n     The --confirm option lets you confirm changesets before sending them. 
You\\n     can also add following to your configuration file to make it default\\n     behaviour::\\n \\n         [phabsend]\\n         confirm = true\\n \\n     phabsend will check obsstore and the above association to decide whether to\\n     update an existing Differential Revision, or create a new one.\\n     \\\"\\\"\\\"\\n     opts = pycompat.byteskwargs(opts)\\n     revs = list(revs) + opts.get(b'rev', [])\\n     revs = scmutil.revrange(repo, revs)\\n     revs.sort()  # ascending order to preserve topological parent\\/child in phab\\n \\n     if not revs:\\n         raise error.Abort(_(b'phabsend requires at least one changeset'))\\n     if opts.get(b'amend'):\\n         cmdutil.checkunfinished(repo)\\n \\n     # {newnode: (oldnode, olddiff, olddrev}\\n     oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])\\n \\n     confirm = ui.configbool(b'phabsend', b'confirm')\\n     confirm |= bool(opts.get(b'confirm'))\\n     if confirm:\\n         confirmed = _confirmbeforesend(repo, revs, oldmap)\\n         if not confirmed:\\n             raise error.Abort(_(b'phabsend cancelled'))\\n \\n     actions = []\\n     reviewers = opts.get(b'reviewer', [])\\n     blockers = opts.get(b'blocker', [])\\n     phids = []\\n     if reviewers:\\n-        phids.extend(userphids(repo, reviewers))\\n+        phids.extend(userphids(repo.ui, reviewers))\\n     if blockers:\\n         phids.extend(\\n-            map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))\\n+            map(\\n+                lambda phid: b'blocking(%s)' % phid,\\n+                userphids(repo.ui, blockers),\\n+            )\\n         )\\n     if phids:\\n         actions.append({b'type': b'reviewers.add', b'value': phids})\\n \\n     drevids = []  # [int]\\n     diffmap = {}  # {newnode: diff}\\n \\n     # Send patches one by one so we know their Differential Revision PHIDs and\\n     # can provide dependency relationship\\n     lastrevphid = None\\n     for rev in revs:\\n         ui.debug(b'sending rev %d\\\\n' % rev)\\n         ctx = repo[rev]\\n \\n         # Get Differential Revision ID\\n         oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))\\n         if oldnode != ctx.node() or opts.get(b'amend'):\\n             # Create or update Differential Revision\\n             revision, diff = createdifferentialrevision(\\n                 ctx,\\n                 revid,\\n                 lastrevphid,\\n                 oldnode,\\n                 olddiff,\\n                 actions,\\n                 opts.get(b'comment'),\\n             )\\n             diffmap[ctx.node()] = diff\\n             newrevid = int(revision[b'object'][b'id'])\\n             newrevphid = revision[b'object'][b'phid']\\n             if revid:\\n                 action = b'updated'\\n             else:\\n                 action = b'created'\\n \\n             # Create a local tag to note the association, if commit message\\n             # does not have it already\\n             m = _differentialrevisiondescre.search(ctx.description())\\n             if not m or int(m.group('id')) != newrevid:\\n                 tagname = b'D%d' % newrevid\\n                 tags.tag(\\n                     repo,\\n                     tagname,\\n                     ctx.node(),\\n                     message=None,\\n                     user=None,\\n                     date=None,\\n                     local=True,\\n                 )\\n         else:\\n             # Nothing changed. 
But still set \\\"newrevphid\\\" so the next revision\\n             # could depend on this one and \\\"newrevid\\\" for the summary line.\\n             newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']\\n             newrevid = revid\\n             action = b'skipped'\\n \\n         actiondesc = ui.label(\\n             {\\n                 b'created': _(b'created'),\\n                 b'skipped': _(b'skipped'),\\n                 b'updated': _(b'updated'),\\n             }[action],\\n             b'phabricator.action.%s' % action,\\n         )\\n         drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')\\n         nodedesc = ui.label(bytes(ctx), b'phabricator.node')\\n         desc = ui.label(ctx.description().split(b'\\\\n')[0], b'phabricator.desc')\\n         ui.write(\\n             _(b'%s - %s - %s: %s\\\\n') % (drevdesc, actiondesc, nodedesc, desc)\\n         )\\n         drevids.append(newrevid)\\n         lastrevphid = newrevphid\\n \\n     # Update commit messages and remove tags\\n     if opts.get(b'amend'):\\n         unfi = repo.unfiltered()\\n         drevs = callconduit(ui, b'differential.query', {b'ids': drevids})\\n         with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):\\n             wnode = unfi[b'.'].node()\\n             mapping = {}  # {oldnode: [newnode]}\\n             for i, rev in enumerate(revs):\\n                 old = unfi[rev]\\n                 drevid = drevids[i]\\n                 drev = [d for d in drevs if int(d[b'id']) == drevid][0]\\n                 newdesc = getdescfromdrev(drev)\\n                 # Make sure commit message contain \\\"Differential Revision\\\"\\n                 if old.description() != newdesc:\\n                     if old.phase() == phases.public:\\n                         ui.warn(\\n                             _(b\\\"warning: not updating public commit %s\\\\n\\\")\\n                             % scmutil.formatchangeid(old)\\n                         )\\n                         continue\\n                     parents = [\\n                         mapping.get(old.p1().node(), (old.p1(),))[0],\\n                         mapping.get(old.p2().node(), (old.p2(),))[0],\\n                     ]\\n                     new = context.metadataonlyctx(\\n                         repo,\\n                         old,\\n                         parents=parents,\\n                         text=newdesc,\\n                         user=old.user(),\\n                         date=old.date(),\\n                         extra=old.extra(),\\n                     )\\n \\n                     newnode = new.commit()\\n \\n                     mapping[old.node()] = [newnode]\\n                     # Update diff property\\n                     # If it fails just warn and keep going, otherwise the DREV\\n                     # associations will be lost\\n                     try:\\n                         writediffproperties(unfi[newnode], diffmap[old.node()])\\n                     except util.urlerr.urlerror:\\n                         ui.warnnoi18n(\\n                             b'Failed to update metadata for D%d\\\\n' % drevid\\n                         )\\n                 # Remove local tags since it's no longer necessary\\n                 tagname = b'D%d' % drevid\\n                 if tagname in repo.tags():\\n                     tags.tag(\\n                         repo,\\n                         tagname,\\n                         nullid,\\n                         message=None,\\n                   
      user=None,\\n                         date=None,\\n                         local=True,\\n                     )\\n             scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)\\n             if wnode in mapping:\\n                 unfi.setparents(mapping[wnode][0])\\n \\n \\n # Map from \\\"hg:meta\\\" keys to header understood by \\\"hg import\\\". The order is\\n # consistent with \\\"hg export\\\" output.\\n _metanamemap = util.sortdict(\\n     [\\n         (b'user', b'User'),\\n         (b'date', b'Date'),\\n         (b'branch', b'Branch'),\\n         (b'node', b'Node ID'),\\n         (b'parent', b'Parent '),\\n     ]\\n )\\n \\n \\n def _confirmbeforesend(repo, revs, oldmap):\\n     url, token = readurltoken(repo.ui)\\n     ui = repo.ui\\n     for rev in revs:\\n         ctx = repo[rev]\\n         desc = ctx.description().splitlines()[0]\\n         oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))\\n         if drevid:\\n             drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')\\n         else:\\n             drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')\\n \\n         ui.write(\\n             _(b'%s - %s: %s\\\\n')\\n             % (\\n                 drevdesc,\\n                 ui.label(bytes(ctx), b'phabricator.node'),\\n                 ui.label(desc, b'phabricator.desc'),\\n             )\\n         )\\n \\n     if ui.promptchoice(\\n         _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url\\n     ):\\n         return False\\n \\n     return True\\n \\n \\n _knownstatusnames = {\\n     b'accepted',\\n     b'needsreview',\\n     b'needsrevision',\\n     b'closed',\\n     b'abandoned',\\n     b'changesplanned',\\n }\\n \\n \\n def _getstatusname(drev):\\n     \\\"\\\"\\\"get normalized status name from a Differential Revision\\\"\\\"\\\"\\n     return drev[b'statusName'].replace(b' ', b'').lower()\\n \\n \\n # Small language to specify differential revisions. Support symbols: (), :X,\\n # +, and -.\\n \\n _elements = {\\n     # token-type: binding-strength, primary, prefix, infix, suffix\\n     b'(': (12, None, (b'group', 1, b')'), None, None),\\n     b':': (8, None, (b'ancestors', 8), None, None),\\n     b'&': (5, None, None, (b'and_', 5), None),\\n     b'+': (4, None, None, (b'add', 4), None),\\n     b'-': (4, None, None, (b'sub', 4), None),\\n     b')': (0, None, None, None, None),\\n     b'symbol': (0, b'symbol', None, None, None),\\n     b'end': (0, None, None, None, None),\\n }\\n \\n \\n def _tokenize(text):\\n     view = memoryview(text)  # zero-copy slice\\n     special = b'():+-& '\\n     pos = 0\\n     length = len(text)\\n     while pos \\u003c length:\\n         symbol = b''.join(\\n             itertools.takewhile(\\n                 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])\\n             )\\n         )\\n         if symbol:\\n             yield (b'symbol', symbol, pos)\\n             pos += len(symbol)\\n         else:  # special char, ignore space\\n             if text[pos : pos + 1] != b' ':\\n                 yield (text[pos : pos + 1], None, pos)\\n             pos += 1\\n     yield (b'end', None, pos)\\n \\n \\n def _parse(text):\\n     tree, pos = parser.parser(_elements).parse(_tokenize(text))\\n     if pos != len(text):\\n         raise error.ParseError(b'invalid token', pos)\\n     return tree\\n \\n \\n def _parsedrev(symbol):\\n     \\\"\\\"\\\"str -\\u003e int or None, ex. 
'D45' -\\u003e 45; '12' -\\u003e 12; 'x' -\\u003e None\\\"\\\"\\\"\\n     if symbol.startswith(b'D') and symbol[1:].isdigit():\\n         return int(symbol[1:])\\n     if symbol.isdigit():\\n         return int(symbol)\\n \\n \\n def _prefetchdrevs(tree):\\n     \\\"\\\"\\\"return ({single-drev-id}, {ancestor-drev-id}) to prefetch\\\"\\\"\\\"\\n     drevs = set()\\n     ancestordrevs = set()\\n     op = tree[0]\\n     if op == b'symbol':\\n         r = _parsedrev(tree[1])\\n         if r:\\n             drevs.add(r)\\n     elif op == b'ancestors':\\n         r, a = _prefetchdrevs(tree[1])\\n         drevs.update(r)\\n         ancestordrevs.update(r)\\n         ancestordrevs.update(a)\\n     else:\\n         for t in tree[1:]:\\n             r, a = _prefetchdrevs(t)\\n             drevs.update(r)\\n             ancestordrevs.update(a)\\n     return drevs, ancestordrevs\\n \\n \\n def querydrev(ui, spec):\\n     \\\"\\\"\\\"return a list of \\\"Differential Revision\\\" dicts\\n \\n     spec is a string using a simple query language, see docstring in phabread\\n     for details.\\n \\n     A \\\"Differential Revision dict\\\" looks like:\\n \\n         {\\n             \\\"activeDiffPHID\\\": \\\"PHID-DIFF-xoqnjkobbm6k4dk6hi72\\\",\\n             \\\"authorPHID\\\": \\\"PHID-USER-tv3ohwc4v4jeu34otlye\\\",\\n             \\\"auxiliary\\\": {\\n               \\\"phabricator:depends-on\\\": [\\n                 \\\"PHID-DREV-gbapp366kutjebt7agcd\\\"\\n               ]\\n               \\\"phabricator:projects\\\": [],\\n             },\\n             \\\"branch\\\": \\\"default\\\",\\n             \\\"ccs\\\": [],\\n             \\\"commits\\\": [],\\n             \\\"dateCreated\\\": \\\"1499181406\\\",\\n             \\\"dateModified\\\": \\\"1499182103\\\",\\n             \\\"diffs\\\": [\\n               \\\"3\\\",\\n               \\\"4\\\",\\n             ],\\n             \\\"hashes\\\": [],\\n             \\\"id\\\": \\\"2\\\",\\n             \\\"lineCount\\\": \\\"2\\\",\\n             \\\"phid\\\": \\\"PHID-DREV-672qvysjcczopag46qty\\\",\\n             \\\"properties\\\": {},\\n             \\\"repositoryPHID\\\": \\\"PHID-REPO-hub2hx62ieuqeheznasv\\\",\\n             \\\"reviewers\\\": [],\\n             \\\"sourcePath\\\": null\\n             \\\"status\\\": \\\"0\\\",\\n             \\\"statusName\\\": \\\"Needs Review\\\",\\n             \\\"summary\\\": \\\"\\\",\\n             \\\"testPlan\\\": \\\"\\\",\\n             \\\"title\\\": \\\"example\\\",\\n             \\\"uri\\\": \\\"https:\\/\\/phab.example.com\\/D2\\\",\\n         }\\n     \\\"\\\"\\\"\\n     # TODO: replace differential.query and differential.querydiffs with\\n     # differential.diff.search because the former (and their output) are\\n     # frozen, and planned to be deprecated and removed.\\n \\n     def fetch(params):\\n         \\\"\\\"\\\"params -\\u003e single drev or None\\\"\\\"\\\"\\n         key = (params.get(b'ids') or params.get(b'phids') or [None])[0]\\n         if key in prefetched:\\n             return prefetched[key]\\n         drevs = callconduit(ui, b'differential.query', params)\\n         # Fill prefetched with the result\\n         for drev in drevs:\\n             prefetched[drev[b'phid']] = drev\\n             prefetched[int(drev[b'id'])] = drev\\n         if key not in prefetched:\\n             raise error.Abort(\\n                 _(b'cannot get Differential Revision %r') % params\\n             )\\n         return prefetched[key]\\n \\n     def getstack(topdrevids):\\n         
\\\"\\\"\\\"given a top, get a stack from the bottom, [id] -\\u003e [id]\\\"\\\"\\\"\\n         visited = set()\\n         result = []\\n         queue = [{b'ids': [i]} for i in topdrevids]\\n         while queue:\\n             params = queue.pop()\\n             drev = fetch(params)\\n             if drev[b'id'] in visited:\\n                 continue\\n             visited.add(drev[b'id'])\\n             result.append(int(drev[b'id']))\\n             auxiliary = drev.get(b'auxiliary', {})\\n             depends = auxiliary.get(b'phabricator:depends-on', [])\\n             for phid in depends:\\n                 queue.append({b'phids': [phid]})\\n         result.reverse()\\n         return smartset.baseset(result)\\n \\n     # Initialize prefetch cache\\n     prefetched = {}  # {id or phid: drev}\\n \\n     tree = _parse(spec)\\n     drevs, ancestordrevs = _prefetchdrevs(tree)\\n \\n     # developer config: phabricator.batchsize\\n     batchsize = ui.configint(b'phabricator', b'batchsize')\\n \\n     # Prefetch Differential Revisions in batch\\n     tofetch = set(drevs)\\n     for r in ancestordrevs:\\n         tofetch.update(range(max(1, r - batchsize), r + 1))\\n     if drevs:\\n         fetch({b'ids': list(tofetch)})\\n     validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))\\n \\n     # Walk through the tree, return smartsets\\n     def walk(tree):\\n         op = tree[0]\\n         if op == b'symbol':\\n             drev = _parsedrev(tree[1])\\n             if drev:\\n                 return smartset.baseset([drev])\\n             elif tree[1] in _knownstatusnames:\\n                 drevs = [\\n                     r\\n                     for r in validids\\n                     if _getstatusname(prefetched[r]) == tree[1]\\n                 ]\\n                 return smartset.baseset(drevs)\\n             else:\\n                 raise error.Abort(_(b'unknown symbol: %s') % tree[1])\\n         elif op in {b'and_', b'add', b'sub'}:\\n             assert len(tree) == 3\\n             return getattr(operator, op)(walk(tree[1]), walk(tree[2]))\\n         elif op == b'group':\\n             return walk(tree[1])\\n         elif op == b'ancestors':\\n             return getstack(walk(tree[1]))\\n         else:\\n             raise error.ProgrammingError(b'illegal tree: %r' % tree)\\n \\n     return [prefetched[r] for r in walk(tree)]\\n \\n \\n def getdescfromdrev(drev):\\n     \\\"\\\"\\\"get description (commit message) from \\\"Differential Revision\\\"\\n \\n     This is similar to differential.getcommitmessage API. 
But we only care\\n     about limited fields: title, summary, test plan, and URL.\\n     \\\"\\\"\\\"\\n     title = drev[b'title']\\n     summary = drev[b'summary'].rstrip()\\n     testplan = drev[b'testPlan'].rstrip()\\n     if testplan:\\n         testplan = b'Test Plan:\\\\n%s' % testplan\\n     uri = b'Differential Revision: %s' % drev[b'uri']\\n     return b'\\\\n\\\\n'.join(filter(None, [title, summary, testplan, uri]))\\n \\n \\n def getdiffmeta(diff):\\n     \\\"\\\"\\\"get commit metadata (date, node, user, p1) from a diff object\\n \\n     The metadata could be \\\"hg:meta\\\", sent by phabsend, like:\\n \\n         \\\"properties\\\": {\\n           \\\"hg:meta\\\": {\\n             \\\"branch\\\": \\\"default\\\",\\n             \\\"date\\\": \\\"1499571514 25200\\\",\\n             \\\"node\\\": \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\",\\n             \\\"user\\\": \\\"Foo Bar \\u003cfoo@example.com\\u003e\\\",\\n             \\\"parent\\\": \\\"6d0abad76b30e4724a37ab8721d630394070fe16\\\"\\n           }\\n         }\\n \\n     Or converted from \\\"local:commits\\\", sent by \\\"arc\\\", like:\\n \\n         \\\"properties\\\": {\\n           \\\"local:commits\\\": {\\n             \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\": {\\n               \\\"author\\\": \\\"Foo Bar\\\",\\n               \\\"authorEmail\\\": \\\"foo@example.com\\\"\\n               \\\"branch\\\": \\\"default\\\",\\n               \\\"commit\\\": \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\",\\n               \\\"local\\\": \\\"1000\\\",\\n               \\\"message\\\": \\\"...\\\",\\n               \\\"parents\\\": [\\\"6d0abad76b30e4724a37ab8721d630394070fe16\\\"],\\n               \\\"rev\\\": \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\",\\n               \\\"summary\\\": \\\"...\\\",\\n               \\\"tag\\\": \\\"\\\",\\n               \\\"time\\\": 1499546314,\\n             }\\n           }\\n         }\\n \\n     Note: metadata extracted from \\\"local:commits\\\" will lose time zone\\n     information.\\n     \\\"\\\"\\\"\\n     props = diff.get(b'properties') or {}\\n     meta = props.get(b'hg:meta')\\n     if not meta:\\n         if props.get(b'local:commits'):\\n             commit = sorted(props[b'local:commits'].values())[0]\\n             meta = {}\\n             if b'author' in commit and b'authorEmail' in commit:\\n                 meta[b'user'] = b'%s \\u003c%s\\u003e' % (\\n                     commit[b'author'],\\n                     commit[b'authorEmail'],\\n                 )\\n             if b'time' in commit:\\n                 meta[b'date'] = b'%d 0' % int(commit[b'time'])\\n             if b'branch' in commit:\\n                 meta[b'branch'] = commit[b'branch']\\n             node = commit.get(b'commit', commit.get(b'rev'))\\n             if node:\\n                 meta[b'node'] = node\\n             if len(commit.get(b'parents', ())) \\u003e= 1:\\n                 meta[b'parent'] = commit[b'parents'][0]\\n         else:\\n             meta = {}\\n     if b'date' not in meta and b'dateCreated' in diff:\\n         meta[b'date'] = b'%s 0' % diff[b'dateCreated']\\n     if b'branch' not in meta and diff.get(b'branch'):\\n         meta[b'branch'] = diff[b'branch']\\n     if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):\\n         meta[b'parent'] = diff[b'sourceControlBaseRevision']\\n     return meta\\n \\n \\n def readpatch(ui, drevs, write):\\n     \\\"\\\"\\\"generate plain-text patch readable by 'hg import'\\n \\n     write is 
usually ui.write. drevs is what \\\"querydrev\\\" returns, results of\\n     \\\"differential.query\\\".\\n     \\\"\\\"\\\"\\n     # Prefetch hg:meta property for all diffs\\n     diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))\\n     diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})\\n \\n     # Generate patch for each drev\\n     for drev in drevs:\\n         ui.note(_(b'reading D%s\\\\n') % drev[b'id'])\\n \\n         diffid = max(int(v) for v in drev[b'diffs'])\\n         body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})\\n         desc = getdescfromdrev(drev)\\n         header = b'# HG changeset patch\\\\n'\\n \\n         # Try to preserve metadata from hg:meta property. Write hg patch\\n         # headers that can be read by the \\\"import\\\" command. See patchheadermap\\n         # and extract in mercurial\\/patch.py for supported headers.\\n         meta = getdiffmeta(diffs[b'%d' % diffid])\\n         for k in _metanamemap.keys():\\n             if k in meta:\\n                 header += b'# %s %s\\\\n' % (_metanamemap[k], meta[k])\\n \\n         content = b'%s%s\\\\n%s' % (header, desc, body)\\n         write(content)\\n \\n \\n @vcrcommand(\\n     b'phabread',\\n     [(b'', b'stack', False, _(b'read dependencies'))],\\n     _(b'DREVSPEC [OPTIONS]'),\\n     helpcategory=command.CATEGORY_IMPORT_EXPORT,\\n )\\n def phabread(ui, repo, spec, **opts):\\n     \\\"\\\"\\\"print patches from Phabricator suitable for importing\\n \\n     DREVSPEC could be a Differential Revision identity, like ``D123``, or just\\n     the number ``123``. It could also have common operators like ``+``, ``-``,\\n     ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to\\n     select a stack.\\n \\n     ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``\\n     could be used to filter patches by status. For performance reason, they\\n     only represent a subset of non-status selections and cannot be used alone.\\n \\n     For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude\\n     D2 and D4. ``:D9 & needsreview`` selects \\\"Needs Review\\\" revisions in a\\n     stack up to D9.\\n \\n     If --stack is given, follow dependencies information and read all patches.\\n     It is equivalent to the ``:`` operator.\\n     \\\"\\\"\\\"\\n     opts = pycompat.byteskwargs(opts)\\n     if opts.get(b'stack'):\\n         spec = b':(%s)' % spec\\n     drevs = querydrev(repo.ui, spec)\\n     readpatch(repo.ui, drevs, ui.write)\\n \\n \\n @vcrcommand(\\n     b'phabupdate',\\n     [\\n         (b'', b'accept', False, _(b'accept revisions')),\\n         (b'', b'reject', False, _(b'reject revisions')),\\n         (b'', b'abandon', False, _(b'abandon revisions')),\\n         (b'', b'reclaim', False, _(b'reclaim revisions')),\\n         (b'm', b'comment', b'', _(b'comment on the last revision')),\\n     ],\\n     _(b'DREVSPEC [OPTIONS]'),\\n     helpcategory=command.CATEGORY_IMPORT_EXPORT,\\n )\\n def phabupdate(ui, repo, spec, **opts):\\n     \\\"\\\"\\\"update Differential Revision in batch\\n \\n     DREVSPEC selects revisions. 
See :hg:`help phabread` for its usage.\\n     \\\"\\\"\\\"\\n     opts = pycompat.byteskwargs(opts)\\n     flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]\\n     if len(flags) \\u003e 1:\\n         raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))\\n \\n     actions = []\\n     for f in flags:\\n         actions.append({b'type': f, b'value': True})\\n \\n     drevs = querydrev(repo.ui, spec)\\n     for i, drev in enumerate(drevs):\\n         if i + 1 == len(drevs) and opts.get(b'comment'):\\n             actions.append({b'type': b'comment', b'value': opts[b'comment']})\\n         if actions:\\n             params = {\\n                 b'objectIdentifier': drev[b'phid'],\\n                 b'transactions': actions,\\n             }\\n             callconduit(ui, b'differential.revision.edit', params)\\n \\n \\n @eh.templatekeyword(b'phabreview', requires={b'ctx'})\\n def template_review(context, mapping):\\n     \\\"\\\"\\\":phabreview: Object describing the review for this changeset.\\n     Has attributes `url` and `id`.\\n     \\\"\\\"\\\"\\n     ctx = context.resource(mapping, b'ctx')\\n     m = _differentialrevisiondescre.search(ctx.description())\\n     if m:\\n         return templateutil.hybriddict(\\n             {b'url': m.group('url'), b'id': b\\\"D%s\\\" % m.group('id'),}\\n         )\\n     else:\\n         tags = ctx.repo().nodetags(ctx.node())\\n         for t in tags:\\n             if _differentialrevisiontagre.match(t):\\n                 url = ctx.repo().ui.config(b'phabricator', b'url')\\n                 if not url.endswith(b'\\/'):\\n                     url += b'\\/'\\n                 url += t\\n \\n                 return templateutil.hybriddict({b'url': url, b'id': t,})\\n     return None\\n \\n \\n @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})\\n def template_status(context, mapping):\\n     \\\"\\\"\\\":phabstatus: String. 
Status of Phabricator differential.\\n     \\\"\\\"\\\"\\n     ctx = context.resource(mapping, b'ctx')\\n     repo = context.resource(mapping, b'repo')\\n     ui = context.resource(mapping, b'ui')\\n \\n     rev = ctx.rev()\\n     try:\\n         drevid = getdrevmap(repo, [rev])[rev]\\n     except KeyError:\\n         return None\\n     drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})\\n     for drev in drevs:\\n         if int(drev[b'id']) == drevid:\\n             return templateutil.hybriddict(\\n                 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}\\n             )\\n     return None\\n \\n \\n @show.showview(b'phabstatus', csettopic=b'work')\\n def phabstatusshowview(ui, repo, displayer):\\n     \\\"\\\"\\\"Phabricator differiential status\\\"\\\"\\\"\\n     revs = repo.revs('sort(_underway(), topo)')\\n     drevmap = getdrevmap(repo, revs)\\n     unknownrevs, drevids, revsbydrevid = [], set([]), {}\\n     for rev, drevid in pycompat.iteritems(drevmap):\\n         if drevid is not None:\\n             drevids.add(drevid)\\n             revsbydrevid.setdefault(drevid, set([])).add(rev)\\n         else:\\n             unknownrevs.append(rev)\\n \\n     drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})\\n     drevsbyrev = {}\\n     for drev in drevs:\\n         for rev in revsbydrevid[int(drev[b'id'])]:\\n             drevsbyrev[rev] = drev\\n \\n     def phabstatus(ctx):\\n         drev = drevsbyrev[ctx.rev()]\\n         status = ui.label(\\n             b'%(statusName)s' % drev,\\n             b'phabricator.status.%s' % _getstatusname(drev),\\n         )\\n         ui.write(b\\\"\\\\n%s %s\\\\n\\\" % (drev[b'uri'], status))\\n \\n     revs -= smartset.baseset(unknownrevs)\\n     revdag = graphmod.dagwalker(repo, revs)\\n \\n     ui.setconfig(b'experimental', b'graphshorten', True)\\n     displayer._exthook = phabstatus\\n     nodelen = show.longestshortest(repo, revs)\\n     logcmdutil.displaygraph(\\n         ui,\\n         repo,\\n         revdag,\\n         displayer,\\n         graphmod.asciiedges,\\n         props={b'nodelen': nodelen},\\n     )\\n\"}]}],\"properties\":[]},\"20442\":{\"id\":\"20442\",\"revisionID\":\"8206\",\"dateCreated\":\"1581962881\",\"dateModified\":\"1583327825\",\"sourceControlBaseRevision\":\"72c6190de577bb2bb448eb2b14121e4ef85d08ff\",\"sourceControlPath\":null,\"sourceControlSystem\":\"hg\",\"branch\":null,\"bookmark\":null,\"creationMethod\":\"commit\",\"description\":\"rHG9b46270917348950e3fb1e73a5c9e46038065622\",\"unitStatus\":\"6\",\"lintStatus\":\"6\",\"changes\":[{\"id\":\"55597\",\"metadata\":{\"line:first\":1186,\"hash.effect\":\"QoC7Ipbh4yf0\"},\"oldPath\":\"hgext\\/phabricator.py\",\"currentPath\":\"hgext\\/phabricator.py\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"6\",\"delLines\":\"6\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1799\",\"newLength\":\"1799\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\" # phabricator.py - simple Phabricator integration\\n #\\n # Copyright 2017 Facebook, Inc.\\n #\\n # This software may be used and distributed according to the terms of the\\n # GNU General Public License version 2 or any later version.\\n \\\"\\\"\\\"simple Phabricator integration (EXPERIMENTAL)\\n \\n This extension provides a ``phabsend`` command which sends a stack of\\n changesets to Phabricator, and a 
``phabread`` command which prints a stack of\\n revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command\\n to update statuses in batch.\\n \\n A \\\"phabstatus\\\" view for :hg:`show` is also provided; it displays status\\n information of Phabricator differentials associated with unfinished\\n changesets.\\n \\n By default, Phabricator requires ``Test Plan`` which might prevent some\\n changeset from being sent. The requirement could be disabled by changing\\n ``differential.require-test-plan-field`` config server side.\\n \\n Config::\\n \\n     [phabricator]\\n     # Phabricator URL\\n     url = https:\\/\\/phab.example.com\\/\\n \\n     # Repo callsign. If a repo has a URL https:\\/\\/$HOST\\/diffusion\\/FOO, then its\\n     # callsign is \\\"FOO\\\".\\n     callsign = FOO\\n \\n     # curl command to use. If not set (default), use builtin HTTP library to\\n     # communicate. If set, use the specified curl command. This could be useful\\n     # if you need to specify advanced options that is not easily supported by\\n     # the internal library.\\n     curlcmd = curl --connect-timeout 2 --retry 3 --silent\\n \\n     [auth]\\n     example.schemes = https\\n     example.prefix = phab.example.com\\n \\n     # API token. Get it from https:\\/\\/$HOST\\/conduit\\/login\\/\\n     example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx\\n \\\"\\\"\\\"\\n \\n from __future__ import absolute_import\\n \\n import base64\\n import contextlib\\n import hashlib\\n import itertools\\n import json\\n import mimetypes\\n import operator\\n import re\\n \\n from mercurial.node import bin, nullid\\n from mercurial.i18n import _\\n from mercurial.pycompat import getattr\\n from mercurial.thirdparty import attr\\n from mercurial import (\\n     cmdutil,\\n     context,\\n     encoding,\\n     error,\\n     exthelper,\\n     graphmod,\\n     httpconnection as httpconnectionmod,\\n     localrepo,\\n     logcmdutil,\\n     match,\\n     mdiff,\\n     obsutil,\\n     parser,\\n     patch,\\n     phases,\\n     pycompat,\\n     scmutil,\\n     smartset,\\n     tags,\\n     templatefilters,\\n     templateutil,\\n     url as urlmod,\\n     util,\\n )\\n from mercurial.utils import (\\n     procutil,\\n     stringutil,\\n )\\n from . import show\\n \\n \\n # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for\\n # extensions which SHIP WITH MERCURIAL. 
Non-mainline extensions should\\n # be specifying the version(s) of Mercurial they are tested with, or\\n # leave the attribute unspecified.\\n testedwith = b'ships-with-hg-core'\\n \\n eh = exthelper.exthelper()\\n \\n cmdtable = eh.cmdtable\\n command = eh.command\\n configtable = eh.configtable\\n templatekeyword = eh.templatekeyword\\n uisetup = eh.finaluisetup\\n \\n # developer config: phabricator.batchsize\\n eh.configitem(\\n     b'phabricator', b'batchsize', default=12,\\n )\\n eh.configitem(\\n     b'phabricator', b'callsign', default=None,\\n )\\n eh.configitem(\\n     b'phabricator', b'curlcmd', default=None,\\n )\\n # developer config: phabricator.repophid\\n eh.configitem(\\n     b'phabricator', b'repophid', default=None,\\n )\\n eh.configitem(\\n     b'phabricator', b'url', default=None,\\n )\\n eh.configitem(\\n     b'phabsend', b'confirm', default=False,\\n )\\n \\n colortable = {\\n     b'phabricator.action.created': b'green',\\n     b'phabricator.action.skipped': b'magenta',\\n     b'phabricator.action.updated': b'magenta',\\n     b'phabricator.desc': b'',\\n     b'phabricator.drev': b'bold',\\n     b'phabricator.node': b'',\\n     b'phabricator.status.abandoned': b'magenta dim',\\n     b'phabricator.status.accepted': b'green bold',\\n     b'phabricator.status.closed': b'green',\\n     b'phabricator.status.needsreview': b'yellow',\\n     b'phabricator.status.needsrevision': b'red',\\n     b'phabricator.status.changesplanned': b'red',\\n }\\n \\n _VCR_FLAGS = [\\n     (\\n         b'',\\n         b'test-vcr',\\n         b'',\\n         _(\\n             b'Path to a vcr file. If nonexistent, will record a new vcr transcript'\\n             b', otherwise will mock all http requests using the specified vcr file.'\\n             b' (ADVANCED)'\\n         ),\\n     ),\\n ]\\n \\n \\n @eh.wrapfunction(localrepo, \\\"loadhgrc\\\")\\n def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):\\n     \\\"\\\"\\\"Load ``.arcconfig`` content into a ui instance on repository open.\\n     \\\"\\\"\\\"\\n     result = False\\n     arcconfig = {}\\n \\n     try:\\n         # json.loads only accepts bytes from 3.6+\\n         rawparams = encoding.unifromlocal(wdirvfs.read(b\\\".arcconfig\\\"))\\n         # json.loads only returns unicode strings\\n         arcconfig = pycompat.rapply(\\n             lambda x: encoding.unitolocal(x)\\n             if isinstance(x, pycompat.unicode)\\n             else x,\\n             pycompat.json_loads(rawparams),\\n         )\\n \\n         result = True\\n     except ValueError:\\n         ui.warn(_(b\\\"invalid JSON in %s\\\\n\\\") % wdirvfs.join(b\\\".arcconfig\\\"))\\n     except IOError:\\n         pass\\n \\n     cfg = util.sortdict()\\n \\n     if b\\\"repository.callsign\\\" in arcconfig:\\n         cfg[(b\\\"phabricator\\\", b\\\"callsign\\\")] = arcconfig[b\\\"repository.callsign\\\"]\\n \\n     if b\\\"phabricator.uri\\\" in arcconfig:\\n         cfg[(b\\\"phabricator\\\", b\\\"url\\\")] = arcconfig[b\\\"phabricator.uri\\\"]\\n \\n     if cfg:\\n         ui.applyconfig(cfg, source=wdirvfs.join(b\\\".arcconfig\\\"))\\n \\n     return orig(ui, wdirvfs, hgvfs, requirements) or result  # Load .hg\\/hgrc\\n \\n \\n def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):\\n     fullflags = flags + _VCR_FLAGS\\n \\n     def hgmatcher(r1, r2):\\n         if r1.uri != r2.uri or r1.method != r2.method:\\n             return False\\n         r1params = util.urlreq.parseqs(r1.body)\\n         r2params = util.urlreq.parseqs(r2.body)\\n     
    for key in r1params:\\n             if key not in r2params:\\n                 return False\\n             value = r1params[key][0]\\n             # we want to compare json payloads without worrying about ordering\\n             if value.startswith(b'{') and value.endswith(b'}'):\\n                 r1json = pycompat.json_loads(value)\\n                 r2json = pycompat.json_loads(r2params[key][0])\\n                 if r1json != r2json:\\n                     return False\\n             elif r2params[key][0] != value:\\n                 return False\\n         return True\\n \\n     def sanitiserequest(request):\\n         request.body = re.sub(\\n             br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body\\n         )\\n         return request\\n \\n     def sanitiseresponse(response):\\n         if 'set-cookie' in response['headers']:\\n             del response['headers']['set-cookie']\\n         return response\\n \\n     def decorate(fn):\\n         def inner(*args, **kwargs):\\n             cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))\\n             if cassette:\\n                 import hgdemandimport\\n \\n                 with hgdemandimport.deactivated():\\n                     import vcr as vcrmod\\n                     import vcr.stubs as stubs\\n \\n                     vcr = vcrmod.VCR(\\n                         serializer='json',\\n                         before_record_request=sanitiserequest,\\n                         before_record_response=sanitiseresponse,\\n                         custom_patches=[\\n                             (\\n                                 urlmod,\\n                                 'httpconnection',\\n                                 stubs.VCRHTTPConnection,\\n                             ),\\n                             (\\n                                 urlmod,\\n                                 'httpsconnection',\\n                                 stubs.VCRHTTPSConnection,\\n                             ),\\n                         ],\\n                     )\\n                     vcr.register_matcher('hgmatcher', hgmatcher)\\n                     with vcr.use_cassette(cassette, match_on=['hgmatcher']):\\n                         return fn(*args, **kwargs)\\n             return fn(*args, **kwargs)\\n \\n         inner.__name__ = fn.__name__\\n         inner.__doc__ = fn.__doc__\\n         return command(\\n             name,\\n             fullflags,\\n             spec,\\n             helpcategory=helpcategory,\\n             optionalrepo=optionalrepo,\\n         )(inner)\\n \\n     return decorate\\n \\n \\n def urlencodenested(params):\\n     \\\"\\\"\\\"like urlencode, but works with nested parameters.\\n \\n     For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be\\n     flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to\\n     urlencode. 
Note: the encoding is consistent with PHP's http_build_query.\\n     \\\"\\\"\\\"\\n     flatparams = util.sortdict()\\n \\n     def process(prefix, obj):\\n         if isinstance(obj, bool):\\n             obj = {True: b'true', False: b'false'}[obj]  # Python -\\u003e PHP form\\n         lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]\\n         items = {list: lister, dict: lambda x: x.items()}.get(type(obj))\\n         if items is None:\\n             flatparams[prefix] = obj\\n         else:\\n             for k, v in items(obj):\\n                 if prefix:\\n                     process(b'%s[%s]' % (prefix, k), v)\\n                 else:\\n                     process(k, v)\\n \\n     process(b'', params)\\n     return util.urlreq.urlencode(flatparams)\\n \\n \\n def readurltoken(ui):\\n     \\\"\\\"\\\"return conduit url, token and make sure they exist\\n \\n     Currently read from [auth] config section. In the future, it might\\n     make sense to read from .arcconfig and .arcrc as well.\\n     \\\"\\\"\\\"\\n     url = ui.config(b'phabricator', b'url')\\n     if not url:\\n         raise error.Abort(\\n             _(b'config %s.%s is required') % (b'phabricator', b'url')\\n         )\\n \\n     res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)\\n     token = None\\n \\n     if res:\\n         group, auth = res\\n \\n         ui.debug(b\\\"using auth.%s.* for authentication\\\\n\\\" % group)\\n \\n         token = auth.get(b'phabtoken')\\n \\n     if not token:\\n         raise error.Abort(\\n             _(b'Can\\\\'t find conduit token associated to %s') % (url,)\\n         )\\n \\n     return url, token\\n \\n \\n def callconduit(ui, name, params):\\n     \\\"\\\"\\\"call Conduit API, params is a dict. return json.loads result, or None\\\"\\\"\\\"\\n     host, token = readurltoken(ui)\\n     url, authinfo = util.url(b'\\/'.join([host, b'api', name])).authinfo()\\n     ui.debug(b'Conduit Call: %s %s\\\\n' % (url, pycompat.byterepr(params)))\\n     params = params.copy()\\n     params[b'__conduit__'] = {\\n         b'token': token,\\n     }\\n     rawdata = {\\n         b'params': templatefilters.json(params),\\n         b'output': b'json',\\n         b'__conduit__': 1,\\n     }\\n     data = urlencodenested(rawdata)\\n     curlcmd = ui.config(b'phabricator', b'curlcmd')\\n     if curlcmd:\\n         sin, sout = procutil.popen2(\\n             b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))\\n         )\\n         sin.write(data)\\n         sin.close()\\n         body = sout.read()\\n     else:\\n         urlopener = urlmod.opener(ui, authinfo)\\n         request = util.urlreq.request(pycompat.strurl(url), data=data)\\n         with contextlib.closing(urlopener.open(request)) as rsp:\\n             body = rsp.read()\\n     ui.debug(b'Conduit Response: %s\\\\n' % body)\\n     parsed = pycompat.rapply(\\n         lambda x: encoding.unitolocal(x)\\n         if isinstance(x, pycompat.unicode)\\n         else x,\\n         # json.loads only accepts bytes from py3.6+\\n         pycompat.json_loads(encoding.unifromlocal(body)),\\n     )\\n     if parsed.get(b'error_code'):\\n         msg = _(b'Conduit Error (%s): %s') % (\\n             parsed[b'error_code'],\\n             parsed[b'error_info'],\\n         )\\n         raise error.Abort(msg)\\n     return parsed[b'result']\\n \\n \\n @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)\\n def debugcallconduit(ui, repo, name):\\n     \\\"\\\"\\\"call Conduit API\\n \\n     Call 
parameters are read from stdin as a JSON blob. Result will be written\\n     to stdout as a JSON blob.\\n     \\\"\\\"\\\"\\n     # json.loads only accepts bytes from 3.6+\\n     rawparams = encoding.unifromlocal(ui.fin.read())\\n     # json.loads only returns unicode strings\\n     params = pycompat.rapply(\\n         lambda x: encoding.unitolocal(x)\\n         if isinstance(x, pycompat.unicode)\\n         else x,\\n         pycompat.json_loads(rawparams),\\n     )\\n     # json.dumps only accepts unicode strings\\n     result = pycompat.rapply(\\n         lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,\\n         callconduit(ui, name, params),\\n     )\\n     s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))\\n     ui.write(b'%s\\\\n' % encoding.unitolocal(s))\\n \\n \\n def getrepophid(repo):\\n     \\\"\\\"\\\"given callsign, return repository PHID or None\\\"\\\"\\\"\\n     # developer config: phabricator.repophid\\n     repophid = repo.ui.config(b'phabricator', b'repophid')\\n     if repophid:\\n         return repophid\\n     callsign = repo.ui.config(b'phabricator', b'callsign')\\n     if not callsign:\\n         return None\\n     query = callconduit(\\n         repo.ui,\\n         b'diffusion.repository.search',\\n         {b'constraints': {b'callsigns': [callsign]}},\\n     )\\n     if len(query[b'data']) == 0:\\n         return None\\n     repophid = query[b'data'][0][b'phid']\\n     repo.ui.setconfig(b'phabricator', b'repophid', repophid)\\n     return repophid\\n \\n \\n _differentialrevisiontagre = re.compile(br'\\\\AD([1-9][0-9]*)\\\\Z')\\n _differentialrevisiondescre = re.compile(\\n     br'^Differential Revision:\\\\s*(?P\\u003curl\\u003e(?:.*)D(?P\\u003cid\\u003e[1-9][0-9]*))$', re.M\\n )\\n \\n \\n def getoldnodedrevmap(repo, nodelist):\\n     \\\"\\\"\\\"find previous nodes that has been sent to Phabricator\\n \\n     return {node: (oldnode, Differential diff, Differential Revision ID)}\\n     for node in nodelist with known previous sent versions, or associated\\n     Differential Revision IDs. ``oldnode`` and ``Differential diff`` could\\n     be ``None``.\\n \\n     Examines commit messages like \\\"Differential Revision:\\\" to get the\\n     association information.\\n \\n     If such commit message line is not found, examines all precursors and their\\n     tags. Tags with format like \\\"D1234\\\" are considered a match and the node\\n     with that tag, and the number after \\\"D\\\" (ex. 
1234) will be returned.\\n \\n     The ``old node``, if not None, is guaranteed to be the last diff of\\n     corresponding Differential Revision, and exist in the repo.\\n     \\\"\\\"\\\"\\n     unfi = repo.unfiltered()\\n     has_node = unfi.changelog.index.has_node\\n \\n     result = {}  # {node: (oldnode?, lastdiff?, drev)}\\n     toconfirm = {}  # {node: (force, {precnode}, drev)}\\n     for node in nodelist:\\n         ctx = unfi[node]\\n         # For tags like \\\"D123\\\", put them into \\\"toconfirm\\\" to verify later\\n         precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))\\n         for n in precnodes:\\n             if has_node(n):\\n                 for tag in unfi.nodetags(n):\\n                     m = _differentialrevisiontagre.match(tag)\\n                     if m:\\n                         toconfirm[node] = (0, set(precnodes), int(m.group(1)))\\n                         break\\n                 else:\\n                     continue  # move to next predecessor\\n                 break  # found a tag, stop\\n         else:\\n             # Check commit message\\n             m = _differentialrevisiondescre.search(ctx.description())\\n             if m:\\n                 toconfirm[node] = (1, set(precnodes), int(m.group('id')))\\n \\n     # Double check if tags are genuine by collecting all old nodes from\\n     # Phabricator, and expect precursors overlap with it.\\n     if toconfirm:\\n         drevs = [drev for force, precs, drev in toconfirm.values()]\\n         alldiffs = callconduit(\\n             unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}\\n         )\\n         getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None\\n         for newnode, (force, precset, drev) in toconfirm.items():\\n             diffs = [\\n                 d for d in alldiffs.values() if int(d[b'revisionID']) == drev\\n             ]\\n \\n             # \\\"precursors\\\" as known by Phabricator\\n             phprecset = set(getnode(d) for d in diffs)\\n \\n             # Ignore if precursors (Phabricator and local repo) do not overlap,\\n             # and force is not set (when commit message says nothing)\\n             if not force and not bool(phprecset & precset):\\n                 tagname = b'D%d' % drev\\n                 tags.tag(\\n                     repo,\\n                     tagname,\\n                     nullid,\\n                     message=None,\\n                     user=None,\\n                     date=None,\\n                     local=True,\\n                 )\\n                 unfi.ui.warn(\\n                     _(\\n                         b'D%d: local tag removed - does not match '\\n                         b'Differential history\\\\n'\\n                     )\\n                     % drev\\n                 )\\n                 continue\\n \\n             # Find the last node using Phabricator metadata, and make sure it\\n             # exists in the repo\\n             oldnode = lastdiff = None\\n             if diffs:\\n                 lastdiff = max(diffs, key=lambda d: int(d[b'id']))\\n                 oldnode = getnode(lastdiff)\\n                 if oldnode and not has_node(oldnode):\\n                     oldnode = None\\n \\n             result[newnode] = (oldnode, lastdiff, drev)\\n \\n     return result\\n \\n \\n def getdrevmap(repo, revs):\\n     \\\"\\\"\\\"Return a dict mapping each rev in `revs` to their Differential Revision\\n     ID or None.\\n     \\\"\\\"\\\"\\n     result = {}\\n    
 for rev in revs:\\n         result[rev] = None\\n         ctx = repo[rev]\\n         # Check commit message\\n         m = _differentialrevisiondescre.search(ctx.description())\\n         if m:\\n             result[rev] = int(m.group('id'))\\n             continue\\n         # Check tags\\n         for tag in repo.nodetags(ctx.node()):\\n             m = _differentialrevisiontagre.match(tag)\\n             if m:\\n                 result[rev] = int(m.group(1))\\n                 break\\n \\n     return result\\n \\n \\n def getdiff(ctx, diffopts):\\n     \\\"\\\"\\\"plain-text diff without header (user, commit message, etc)\\\"\\\"\\\"\\n     output = util.stringio()\\n     for chunk, _label in patch.diffui(\\n         ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts\\n     ):\\n         output.write(chunk)\\n     return output.getvalue()\\n \\n \\n class DiffChangeType(object):\\n     ADD = 1\\n     CHANGE = 2\\n     DELETE = 3\\n     MOVE_AWAY = 4\\n     COPY_AWAY = 5\\n     MOVE_HERE = 6\\n     COPY_HERE = 7\\n     MULTICOPY = 8\\n \\n \\n class DiffFileType(object):\\n     TEXT = 1\\n     IMAGE = 2\\n     BINARY = 3\\n \\n \\n @attr.s\\n class phabhunk(dict):\\n     \\\"\\\"\\\"Represents a Differential hunk, which is owned by a Differential change\\n     \\\"\\\"\\\"\\n \\n     oldOffset = attr.ib(default=0)  # camelcase-required\\n     oldLength = attr.ib(default=0)  # camelcase-required\\n     newOffset = attr.ib(default=0)  # camelcase-required\\n     newLength = attr.ib(default=0)  # camelcase-required\\n     corpus = attr.ib(default='')\\n     # These get added to the phabchange's equivalents\\n     addLines = attr.ib(default=0)  # camelcase-required\\n     delLines = attr.ib(default=0)  # camelcase-required\\n \\n \\n @attr.s\\n class phabchange(object):\\n     \\\"\\\"\\\"Represents a Differential change, owns Differential hunks and owned by a\\n     Differential diff.  Each one represents one file in a diff.\\n     \\\"\\\"\\\"\\n \\n     currentPath = attr.ib(default=None)  # camelcase-required\\n     oldPath = attr.ib(default=None)  # camelcase-required\\n     awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required\\n     metadata = attr.ib(default=attr.Factory(dict))\\n     oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required\\n     newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required\\n     type = attr.ib(default=DiffChangeType.CHANGE)\\n     fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required\\n     commitHash = attr.ib(default=None)  # camelcase-required\\n     addLines = attr.ib(default=0)  # camelcase-required\\n     delLines = attr.ib(default=0)  # camelcase-required\\n     hunks = attr.ib(default=attr.Factory(list))\\n \\n     def copynewmetadatatoold(self):\\n         for key in list(self.metadata.keys()):\\n             newkey = key.replace(b'new:', b'old:')\\n             self.metadata[newkey] = self.metadata[key]\\n \\n     def addoldmode(self, value):\\n         self.oldProperties[b'unix:filemode'] = value\\n \\n     def addnewmode(self, value):\\n         self.newProperties[b'unix:filemode'] = value\\n \\n     def addhunk(self, hunk):\\n         if not isinstance(hunk, phabhunk):\\n             raise error.Abort(b'phabchange.addhunk only takes phabhunks')\\n         self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))\\n         # It's useful to include these stats since the Phab web UI shows them,\\n         # and uses them to estimate how large a change a Revision is. 
Also used\\n         # in email subjects for the [+++--] bit.\\n         self.addLines += hunk.addLines\\n         self.delLines += hunk.delLines\\n \\n \\n @attr.s\\n class phabdiff(object):\\n     \\\"\\\"\\\"Represents a Differential diff, owns Differential changes.  Corresponds\\n     to a commit.\\n     \\\"\\\"\\\"\\n \\n     # Doesn't seem to be any reason to send this (output of uname -n)\\n     sourceMachine = attr.ib(default=b'')  # camelcase-required\\n     sourcePath = attr.ib(default=b'\\/')  # camelcase-required\\n     sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required\\n     sourceControlPath = attr.ib(default=b'\\/')  # camelcase-required\\n     sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required\\n     branch = attr.ib(default=b'default')\\n     bookmark = attr.ib(default=None)\\n     creationMethod = attr.ib(default=b'phabsend')  # camelcase-required\\n     lintStatus = attr.ib(default=b'none')  # camelcase-required\\n     unitStatus = attr.ib(default=b'none')  # camelcase-required\\n     changes = attr.ib(default=attr.Factory(dict))\\n     repositoryPHID = attr.ib(default=None)  # camelcase-required\\n \\n     def addchange(self, change):\\n         if not isinstance(change, phabchange):\\n             raise error.Abort(b'phabdiff.addchange only takes phabchanges')\\n         self.changes[change.currentPath] = pycompat.byteskwargs(\\n             attr.asdict(change)\\n         )\\n \\n \\n def maketext(pchange, ctx, fname):\\n     \\\"\\\"\\\"populate the phabchange for a text file\\\"\\\"\\\"\\n     repo = ctx.repo()\\n     fmatcher = match.exact([fname])\\n     diffopts = mdiff.diffopts(git=True, context=32767)\\n     _pfctx, _fctx, header, fhunks = next(\\n         patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)\\n     )\\n \\n     for fhunk in fhunks:\\n         (oldOffset, oldLength, newOffset, newLength), lines = fhunk\\n         corpus = b''.join(lines[1:])\\n         shunk = list(header)\\n         shunk.extend(lines)\\n         _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(\\n             patch.diffstatdata(util.iterlines(shunk))\\n         )\\n         pchange.addhunk(\\n             phabhunk(\\n                 oldOffset,\\n                 oldLength,\\n                 newOffset,\\n                 newLength,\\n                 corpus,\\n                 addLines,\\n                 delLines,\\n             )\\n         )\\n \\n \\n def uploadchunks(fctx, fphid):\\n     \\\"\\\"\\\"upload large binary files as separate chunks.\\n     Phab requests chunking over 8MiB, and splits into 4MiB chunks\\n     \\\"\\\"\\\"\\n     ui = fctx.repo().ui\\n     chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})\\n     with ui.makeprogress(\\n         _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)\\n     ) as progress:\\n         for chunk in chunks:\\n             progress.increment()\\n             if chunk[b'complete']:\\n                 continue\\n             bstart = int(chunk[b'byteStart'])\\n             bend = int(chunk[b'byteEnd'])\\n             callconduit(\\n                 ui,\\n                 b'file.uploadchunk',\\n                 {\\n                     b'filePHID': fphid,\\n                     b'byteStart': bstart,\\n                     b'data': base64.b64encode(fctx.data()[bstart:bend]),\\n                     b'dataEncoding': b'base64',\\n                 },\\n             )\\n \\n \\n def uploadfile(fctx):\\n     \\\"\\\"\\\"upload binary files to 
Phabricator\\\"\\\"\\\"\\n     repo = fctx.repo()\\n     ui = repo.ui\\n     fname = fctx.path()\\n     size = fctx.size()\\n     fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())\\n \\n     # an allocate call is required first to see if an upload is even required\\n     # (Phab might already have it) and to determine if chunking is needed\\n     allocateparams = {\\n         b'name': fname,\\n         b'contentLength': size,\\n         b'contentHash': fhash,\\n     }\\n     filealloc = callconduit(ui, b'file.allocate', allocateparams)\\n     fphid = filealloc[b'filePHID']\\n \\n     if filealloc[b'upload']:\\n         ui.write(_(b'uploading %s\\\\n') % bytes(fctx))\\n         if not fphid:\\n             uploadparams = {\\n                 b'name': fname,\\n                 b'data_base64': base64.b64encode(fctx.data()),\\n             }\\n             fphid = callconduit(ui, b'file.upload', uploadparams)\\n         else:\\n             uploadchunks(fctx, fphid)\\n     else:\\n         ui.debug(b'server already has %s\\\\n' % bytes(fctx))\\n \\n     if not fphid:\\n         raise error.Abort(b'Upload of %s failed.' % bytes(fctx))\\n \\n     return fphid\\n \\n \\n def addoldbinary(pchange, fctx):\\n     \\\"\\\"\\\"add the metadata for the previous version of a binary file to the\\n     phabchange for the new version\\n     \\\"\\\"\\\"\\n     oldfctx = fctx.p1()\\n     if fctx.cmp(oldfctx):\\n         # Files differ, add the old one\\n         pchange.metadata[b'old:file:size'] = oldfctx.size()\\n         mimeguess, _enc = mimetypes.guess_type(\\n             encoding.unifromlocal(oldfctx.path())\\n         )\\n         if mimeguess:\\n             pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(\\n                 mimeguess\\n             )\\n         fphid = uploadfile(oldfctx)\\n         pchange.metadata[b'old:binary-phid'] = fphid\\n     else:\\n         # If it's left as IMAGE\\/BINARY web UI might try to display it\\n         pchange.fileType = DiffFileType.TEXT\\n         pchange.copynewmetadatatoold()\\n \\n \\n def makebinary(pchange, fctx):\\n     \\\"\\\"\\\"populate the phabchange for a binary file\\\"\\\"\\\"\\n     pchange.fileType = DiffFileType.BINARY\\n     fphid = uploadfile(fctx)\\n     pchange.metadata[b'new:binary-phid'] = fphid\\n     pchange.metadata[b'new:file:size'] = fctx.size()\\n     mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))\\n     if mimeguess:\\n         mimeguess = pycompat.bytestr(mimeguess)\\n         pchange.metadata[b'new:file:mime-type'] = mimeguess\\n         if mimeguess.startswith(b'image\\/'):\\n             pchange.fileType = DiffFileType.IMAGE\\n \\n \\n # Copied from mercurial\\/patch.py\\n gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}\\n \\n \\n def notutf8(fctx):\\n     \\\"\\\"\\\"detect non-UTF-8 text files since Phabricator requires them to be marked\\n     as binary\\n     \\\"\\\"\\\"\\n     try:\\n         fctx.data().decode('utf-8')\\n         if fctx.parents():\\n             fctx.p1().data().decode('utf-8')\\n         return False\\n     except UnicodeDecodeError:\\n         fctx.repo().ui.write(\\n             _(b'file %s detected as non-UTF-8, marked as binary\\\\n')\\n             % fctx.path()\\n         )\\n         return True\\n \\n \\n def addremoved(pdiff, ctx, removed):\\n     \\\"\\\"\\\"add removed files to the phabdiff. 
Shouldn't include moves\\\"\\\"\\\"\\n     for fname in removed:\\n         pchange = phabchange(\\n             currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE\\n         )\\n         pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])\\n         fctx = ctx.p1()[fname]\\n         if not (fctx.isbinary() or notutf8(fctx)):\\n             maketext(pchange, ctx, fname)\\n \\n         pdiff.addchange(pchange)\\n \\n \\n def addmodified(pdiff, ctx, modified):\\n     \\\"\\\"\\\"add modified files to the phabdiff\\\"\\\"\\\"\\n     for fname in modified:\\n         fctx = ctx[fname]\\n         pchange = phabchange(currentPath=fname, oldPath=fname)\\n         filemode = gitmode[ctx[fname].flags()]\\n         originalmode = gitmode[ctx.p1()[fname].flags()]\\n         if filemode != originalmode:\\n             pchange.addoldmode(originalmode)\\n             pchange.addnewmode(filemode)\\n \\n         if fctx.isbinary() or notutf8(fctx):\\n             makebinary(pchange, fctx)\\n             addoldbinary(pchange, fctx)\\n         else:\\n             maketext(pchange, ctx, fname)\\n \\n         pdiff.addchange(pchange)\\n \\n \\n def addadded(pdiff, ctx, added, removed):\\n     \\\"\\\"\\\"add file adds to the phabdiff, both new files and copies\\/moves\\\"\\\"\\\"\\n     # Keep track of files that've been recorded as moved\\/copied, so if there are\\n     # additional copies we can mark them (moves get removed from removed)\\n     copiedchanges = {}\\n     movedchanges = {}\\n     for fname in added:\\n         fctx = ctx[fname]\\n         pchange = phabchange(currentPath=fname)\\n \\n         filemode = gitmode[ctx[fname].flags()]\\n         renamed = fctx.renamed()\\n \\n         if renamed:\\n             originalfname = renamed[0]\\n             originalmode = gitmode[ctx.p1()[originalfname].flags()]\\n             pchange.oldPath = originalfname\\n \\n             if originalfname in removed:\\n                 origpchange = phabchange(\\n                     currentPath=originalfname,\\n                     oldPath=originalfname,\\n                     type=DiffChangeType.MOVE_AWAY,\\n                     awayPaths=[fname],\\n                 )\\n                 movedchanges[originalfname] = origpchange\\n                 removed.remove(originalfname)\\n                 pchange.type = DiffChangeType.MOVE_HERE\\n             elif originalfname in movedchanges:\\n                 movedchanges[originalfname].type = DiffChangeType.MULTICOPY\\n                 movedchanges[originalfname].awayPaths.append(fname)\\n                 pchange.type = DiffChangeType.COPY_HERE\\n             else:  # pure copy\\n                 if originalfname not in copiedchanges:\\n                     origpchange = phabchange(\\n                         currentPath=originalfname, type=DiffChangeType.COPY_AWAY\\n                     )\\n                     copiedchanges[originalfname] = origpchange\\n                 else:\\n                     origpchange = copiedchanges[originalfname]\\n                 origpchange.awayPaths.append(fname)\\n                 pchange.type = DiffChangeType.COPY_HERE\\n \\n             if filemode != originalmode:\\n                 pchange.addoldmode(originalmode)\\n                 pchange.addnewmode(filemode)\\n         else:  # Brand-new file\\n             pchange.addnewmode(gitmode[fctx.flags()])\\n             pchange.type = DiffChangeType.ADD\\n \\n         if fctx.isbinary() or notutf8(fctx):\\n             makebinary(pchange, fctx)\\n             if 
renamed:\\n                 addoldbinary(pchange, fctx)\\n         else:\\n             maketext(pchange, ctx, fname)\\n \\n         pdiff.addchange(pchange)\\n \\n     for _path, copiedchange in copiedchanges.items():\\n         pdiff.addchange(copiedchange)\\n     for _path, movedchange in movedchanges.items():\\n         pdiff.addchange(movedchange)\\n \\n \\n def creatediff(ctx):\\n     \\\"\\\"\\\"create a Differential Diff\\\"\\\"\\\"\\n     repo = ctx.repo()\\n     repophid = getrepophid(repo)\\n     # Create a \\\"Differential Diff\\\" via \\\"differential.creatediff\\\" API\\n     pdiff = phabdiff(\\n         sourceControlBaseRevision=b'%s' % ctx.p1().hex(),\\n         branch=b'%s' % ctx.branch(),\\n     )\\n     modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)\\n     # addadded will remove moved files from removed, so addremoved won't get\\n     # them\\n     addadded(pdiff, ctx, added, removed)\\n     addmodified(pdiff, ctx, modified)\\n     addremoved(pdiff, ctx, removed)\\n     if repophid:\\n         pdiff.repositoryPHID = repophid\\n     diff = callconduit(\\n         repo.ui,\\n         b'differential.creatediff',\\n         pycompat.byteskwargs(attr.asdict(pdiff)),\\n     )\\n     if not diff:\\n         raise error.Abort(_(b'cannot create diff for %s') % ctx)\\n     return diff\\n \\n \\n def writediffproperties(ctx, diff):\\n     \\\"\\\"\\\"write metadata to diff so patches could be applied losslessly\\\"\\\"\\\"\\n     # creatediff returns with a diffid but query returns with an id\\n     diffid = diff.get(b'diffid', diff.get(b'id'))\\n     params = {\\n         b'diff_id': diffid,\\n         b'name': b'hg:meta',\\n         b'data': templatefilters.json(\\n             {\\n                 b'user': ctx.user(),\\n                 b'date': b'%d %d' % ctx.date(),\\n                 b'branch': ctx.branch(),\\n                 b'node': ctx.hex(),\\n                 b'parent': ctx.p1().hex(),\\n             }\\n         ),\\n     }\\n     callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)\\n \\n     params = {\\n         b'diff_id': diffid,\\n         b'name': b'local:commits',\\n         b'data': templatefilters.json(\\n             {\\n                 ctx.hex(): {\\n                     b'author': stringutil.person(ctx.user()),\\n                     b'authorEmail': stringutil.email(ctx.user()),\\n                     b'time': int(ctx.date()[0]),\\n                     b'commit': ctx.hex(),\\n                     b'parents': [ctx.p1().hex()],\\n                     b'branch': ctx.branch(),\\n                 },\\n             }\\n         ),\\n     }\\n     callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)\\n \\n \\n def createdifferentialrevision(\\n     ctx,\\n     revid=None,\\n     parentrevphid=None,\\n     oldnode=None,\\n     olddiff=None,\\n     actions=None,\\n     comment=None,\\n ):\\n     \\\"\\\"\\\"create or update a Differential Revision\\n \\n     If revid is None, create a new Differential Revision, otherwise update\\n     revid. 
If parentrevphid is not None, set it as a dependency.\\n \\n     If oldnode is not None, check if the patch content (without commit message\\n     and metadata) has changed before creating another diff.\\n \\n     If actions is not None, they will be appended to the transaction.\\n     \\\"\\\"\\\"\\n     repo = ctx.repo()\\n     if oldnode:\\n         diffopts = mdiff.diffopts(git=True, context=32767)\\n         oldctx = repo.unfiltered()[oldnode]\\n         neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)\\n     else:\\n         neednewdiff = True\\n \\n     transactions = []\\n     if neednewdiff:\\n         diff = creatediff(ctx)\\n         transactions.append({b'type': b'update', b'value': diff[b'phid']})\\n         if comment:\\n             transactions.append({b'type': b'comment', b'value': comment})\\n     else:\\n         # Even if we don't need to upload a new diff because the patch content\\n         # does not change. We might still need to update its metadata so\\n         # pushers could know the correct node metadata.\\n         assert olddiff\\n         diff = olddiff\\n     writediffproperties(ctx, diff)\\n \\n     # Set the parent Revision every time, so commit re-ordering is picked-up\\n     if parentrevphid:\\n         transactions.append(\\n             {b'type': b'parents.set', b'value': [parentrevphid]}\\n         )\\n \\n     if actions:\\n         transactions += actions\\n \\n     # Parse commit message and update related fields.\\n     desc = ctx.description()\\n     info = callconduit(\\n         repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}\\n     )\\n     for k, v in info[b'fields'].items():\\n         if k in [b'title', b'summary', b'testPlan']:\\n             transactions.append({b'type': k, b'value': v})\\n \\n     params = {b'transactions': transactions}\\n     if revid is not None:\\n         # Update an existing Differential Revision\\n         params[b'objectIdentifier'] = revid\\n \\n     revision = callconduit(repo.ui, b'differential.revision.edit', params)\\n     if not revision:\\n         raise error.Abort(_(b'cannot create revision for %s') % ctx)\\n \\n     return revision, diff\\n \\n \\n def userphids(repo, names):\\n     \\\"\\\"\\\"convert user names to PHIDs\\\"\\\"\\\"\\n     names = [name.lower() for name in names]\\n     query = {b'constraints': {b'usernames': names}}\\n     result = callconduit(repo.ui, b'user.search', query)\\n     # username not found is not an error of the API. 
So check if we have missed\\n     # some names here.\\n     data = result[b'data']\\n     resolved = set(entry[b'fields'][b'username'].lower() for entry in data)\\n     unresolved = set(names) - resolved\\n     if unresolved:\\n         raise error.Abort(\\n             _(b'unknown username: %s') % b' '.join(sorted(unresolved))\\n         )\\n     return [entry[b'phid'] for entry in data]\\n \\n \\n @vcrcommand(\\n     b'phabsend',\\n     [\\n         (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),\\n         (b'', b'amend', True, _(b'update commit messages')),\\n         (b'', b'reviewer', [], _(b'specify reviewers')),\\n         (b'', b'blocker', [], _(b'specify blocking reviewers')),\\n         (\\n             b'm',\\n             b'comment',\\n             b'',\\n             _(b'add a comment to Revisions with new\\/updated Diffs'),\\n         ),\\n         (b'', b'confirm', None, _(b'ask for confirmation before sending')),\\n     ],\\n     _(b'REV [OPTIONS]'),\\n     helpcategory=command.CATEGORY_IMPORT_EXPORT,\\n )\\n def phabsend(ui, repo, *revs, **opts):\\n     \\\"\\\"\\\"upload changesets to Phabricator\\n \\n     If there are multiple revisions specified, they will be send as a stack\\n     with a linear dependencies relationship using the order specified by the\\n     revset.\\n \\n     For the first time uploading changesets, local tags will be created to\\n     maintain the association. After the first time, phabsend will check\\n     obsstore and tags information so it can figure out whether to update an\\n     existing Differential Revision, or create a new one.\\n \\n     If --amend is set, update commit messages so they have the\\n     ``Differential Revision`` URL, remove related tags. This is similar to what\\n     arcanist will do, and is more desired in author-push workflows. Otherwise,\\n     use local tags to record the ``Differential Revision`` association.\\n \\n     The --confirm option lets you confirm changesets before sending them. 
You\\n     can also add following to your configuration file to make it default\\n     behaviour::\\n \\n         [phabsend]\\n         confirm = true\\n \\n     phabsend will check obsstore and the above association to decide whether to\\n     update an existing Differential Revision, or create a new one.\\n     \\\"\\\"\\\"\\n     opts = pycompat.byteskwargs(opts)\\n     revs = list(revs) + opts.get(b'rev', [])\\n     revs = scmutil.revrange(repo, revs)\\n     revs.sort()  # ascending order to preserve topological parent\\/child in phab\\n \\n     if not revs:\\n         raise error.Abort(_(b'phabsend requires at least one changeset'))\\n     if opts.get(b'amend'):\\n         cmdutil.checkunfinished(repo)\\n \\n     # {newnode: (oldnode, olddiff, olddrev}\\n     oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])\\n \\n     confirm = ui.configbool(b'phabsend', b'confirm')\\n     confirm |= bool(opts.get(b'confirm'))\\n     if confirm:\\n         confirmed = _confirmbeforesend(repo, revs, oldmap)\\n         if not confirmed:\\n             raise error.Abort(_(b'phabsend cancelled'))\\n \\n     actions = []\\n     reviewers = opts.get(b'reviewer', [])\\n     blockers = opts.get(b'blocker', [])\\n     phids = []\\n     if reviewers:\\n         phids.extend(userphids(repo, reviewers))\\n     if blockers:\\n         phids.extend(\\n             map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))\\n         )\\n     if phids:\\n         actions.append({b'type': b'reviewers.add', b'value': phids})\\n \\n     drevids = []  # [int]\\n     diffmap = {}  # {newnode: diff}\\n \\n     # Send patches one by one so we know their Differential Revision PHIDs and\\n     # can provide dependency relationship\\n     lastrevphid = None\\n     for rev in revs:\\n         ui.debug(b'sending rev %d\\\\n' % rev)\\n         ctx = repo[rev]\\n \\n         # Get Differential Revision ID\\n         oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))\\n         if oldnode != ctx.node() or opts.get(b'amend'):\\n             # Create or update Differential Revision\\n             revision, diff = createdifferentialrevision(\\n                 ctx,\\n                 revid,\\n                 lastrevphid,\\n                 oldnode,\\n                 olddiff,\\n                 actions,\\n                 opts.get(b'comment'),\\n             )\\n             diffmap[ctx.node()] = diff\\n             newrevid = int(revision[b'object'][b'id'])\\n             newrevphid = revision[b'object'][b'phid']\\n             if revid:\\n                 action = b'updated'\\n             else:\\n                 action = b'created'\\n \\n             # Create a local tag to note the association, if commit message\\n             # does not have it already\\n             m = _differentialrevisiondescre.search(ctx.description())\\n             if not m or int(m.group('id')) != newrevid:\\n                 tagname = b'D%d' % newrevid\\n                 tags.tag(\\n                     repo,\\n                     tagname,\\n                     ctx.node(),\\n                     message=None,\\n                     user=None,\\n                     date=None,\\n                     local=True,\\n                 )\\n         else:\\n             # Nothing changed. 
But still set \\\"newrevphid\\\" so the next revision\\n             # could depend on this one and \\\"newrevid\\\" for the summary line.\\n-            newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']\\n+            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']\\n             newrevid = revid\\n             action = b'skipped'\\n \\n         actiondesc = ui.label(\\n             {\\n                 b'created': _(b'created'),\\n                 b'skipped': _(b'skipped'),\\n                 b'updated': _(b'updated'),\\n             }[action],\\n             b'phabricator.action.%s' % action,\\n         )\\n         drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')\\n         nodedesc = ui.label(bytes(ctx), b'phabricator.node')\\n         desc = ui.label(ctx.description().split(b'\\\\n')[0], b'phabricator.desc')\\n         ui.write(\\n             _(b'%s - %s - %s: %s\\\\n') % (drevdesc, actiondesc, nodedesc, desc)\\n         )\\n         drevids.append(newrevid)\\n         lastrevphid = newrevphid\\n \\n     # Update commit messages and remove tags\\n     if opts.get(b'amend'):\\n         unfi = repo.unfiltered()\\n         drevs = callconduit(ui, b'differential.query', {b'ids': drevids})\\n         with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):\\n             wnode = unfi[b'.'].node()\\n             mapping = {}  # {oldnode: [newnode]}\\n             for i, rev in enumerate(revs):\\n                 old = unfi[rev]\\n                 drevid = drevids[i]\\n                 drev = [d for d in drevs if int(d[b'id']) == drevid][0]\\n                 newdesc = getdescfromdrev(drev)\\n                 # Make sure commit message contain \\\"Differential Revision\\\"\\n                 if old.description() != newdesc:\\n                     if old.phase() == phases.public:\\n                         ui.warn(\\n                             _(b\\\"warning: not updating public commit %s\\\\n\\\")\\n                             % scmutil.formatchangeid(old)\\n                         )\\n                         continue\\n                     parents = [\\n                         mapping.get(old.p1().node(), (old.p1(),))[0],\\n                         mapping.get(old.p2().node(), (old.p2(),))[0],\\n                     ]\\n                     new = context.metadataonlyctx(\\n                         repo,\\n                         old,\\n                         parents=parents,\\n                         text=newdesc,\\n                         user=old.user(),\\n                         date=old.date(),\\n                         extra=old.extra(),\\n                     )\\n \\n                     newnode = new.commit()\\n \\n                     mapping[old.node()] = [newnode]\\n                     # Update diff property\\n                     # If it fails just warn and keep going, otherwise the DREV\\n                     # associations will be lost\\n                     try:\\n                         writediffproperties(unfi[newnode], diffmap[old.node()])\\n                     except util.urlerr.urlerror:\\n                         ui.warnnoi18n(\\n                             b'Failed to update metadata for D%d\\\\n' % drevid\\n                         )\\n                 # Remove local tags since it's no longer necessary\\n                 tagname = b'D%d' % drevid\\n                 if tagname in repo.tags():\\n                     tags.tag(\\n                         repo,\\n                         tagname,\\n                        
 nullid,\\n                         message=None,\\n                         user=None,\\n                         date=None,\\n                         local=True,\\n                     )\\n             scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)\\n             if wnode in mapping:\\n                 unfi.setparents(mapping[wnode][0])\\n \\n \\n # Map from \\\"hg:meta\\\" keys to header understood by \\\"hg import\\\". The order is\\n # consistent with \\\"hg export\\\" output.\\n _metanamemap = util.sortdict(\\n     [\\n         (b'user', b'User'),\\n         (b'date', b'Date'),\\n         (b'branch', b'Branch'),\\n         (b'node', b'Node ID'),\\n         (b'parent', b'Parent '),\\n     ]\\n )\\n \\n \\n def _confirmbeforesend(repo, revs, oldmap):\\n     url, token = readurltoken(repo.ui)\\n     ui = repo.ui\\n     for rev in revs:\\n         ctx = repo[rev]\\n         desc = ctx.description().splitlines()[0]\\n         oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))\\n         if drevid:\\n             drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')\\n         else:\\n             drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')\\n \\n         ui.write(\\n             _(b'%s - %s: %s\\\\n')\\n             % (\\n                 drevdesc,\\n                 ui.label(bytes(ctx), b'phabricator.node'),\\n                 ui.label(desc, b'phabricator.desc'),\\n             )\\n         )\\n \\n     if ui.promptchoice(\\n         _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url\\n     ):\\n         return False\\n \\n     return True\\n \\n \\n _knownstatusnames = {\\n     b'accepted',\\n     b'needsreview',\\n     b'needsrevision',\\n     b'closed',\\n     b'abandoned',\\n     b'changesplanned',\\n }\\n \\n \\n def _getstatusname(drev):\\n     \\\"\\\"\\\"get normalized status name from a Differential Revision\\\"\\\"\\\"\\n     return drev[b'statusName'].replace(b' ', b'').lower()\\n \\n \\n # Small language to specify differential revisions. Support symbols: (), :X,\\n # +, and -.\\n \\n _elements = {\\n     # token-type: binding-strength, primary, prefix, infix, suffix\\n     b'(': (12, None, (b'group', 1, b')'), None, None),\\n     b':': (8, None, (b'ancestors', 8), None, None),\\n     b'&': (5, None, None, (b'and_', 5), None),\\n     b'+': (4, None, None, (b'add', 4), None),\\n     b'-': (4, None, None, (b'sub', 4), None),\\n     b')': (0, None, None, None, None),\\n     b'symbol': (0, b'symbol', None, None, None),\\n     b'end': (0, None, None, None, None),\\n }\\n \\n \\n def _tokenize(text):\\n     view = memoryview(text)  # zero-copy slice\\n     special = b'():+-& '\\n     pos = 0\\n     length = len(text)\\n     while pos \\u003c length:\\n         symbol = b''.join(\\n             itertools.takewhile(\\n                 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])\\n             )\\n         )\\n         if symbol:\\n             yield (b'symbol', symbol, pos)\\n             pos += len(symbol)\\n         else:  # special char, ignore space\\n             if text[pos : pos + 1] != b' ':\\n                 yield (text[pos : pos + 1], None, pos)\\n             pos += 1\\n     yield (b'end', None, pos)\\n \\n \\n def _parse(text):\\n     tree, pos = parser.parser(_elements).parse(_tokenize(text))\\n     if pos != len(text):\\n         raise error.ParseError(b'invalid token', pos)\\n     return tree\\n \\n \\n def _parsedrev(symbol):\\n     \\\"\\\"\\\"str -\\u003e int or None, ex. 
'D45' -\\u003e 45; '12' -\\u003e 12; 'x' -\\u003e None\\\"\\\"\\\"\\n     if symbol.startswith(b'D') and symbol[1:].isdigit():\\n         return int(symbol[1:])\\n     if symbol.isdigit():\\n         return int(symbol)\\n \\n \\n def _prefetchdrevs(tree):\\n     \\\"\\\"\\\"return ({single-drev-id}, {ancestor-drev-id}) to prefetch\\\"\\\"\\\"\\n     drevs = set()\\n     ancestordrevs = set()\\n     op = tree[0]\\n     if op == b'symbol':\\n         r = _parsedrev(tree[1])\\n         if r:\\n             drevs.add(r)\\n     elif op == b'ancestors':\\n         r, a = _prefetchdrevs(tree[1])\\n         drevs.update(r)\\n         ancestordrevs.update(r)\\n         ancestordrevs.update(a)\\n     else:\\n         for t in tree[1:]:\\n             r, a = _prefetchdrevs(t)\\n             drevs.update(r)\\n             ancestordrevs.update(a)\\n     return drevs, ancestordrevs\\n \\n \\n-def querydrev(repo, spec):\\n+def querydrev(ui, spec):\\n     \\\"\\\"\\\"return a list of \\\"Differential Revision\\\" dicts\\n \\n     spec is a string using a simple query language, see docstring in phabread\\n     for details.\\n \\n     A \\\"Differential Revision dict\\\" looks like:\\n \\n         {\\n             \\\"activeDiffPHID\\\": \\\"PHID-DIFF-xoqnjkobbm6k4dk6hi72\\\",\\n             \\\"authorPHID\\\": \\\"PHID-USER-tv3ohwc4v4jeu34otlye\\\",\\n             \\\"auxiliary\\\": {\\n               \\\"phabricator:depends-on\\\": [\\n                 \\\"PHID-DREV-gbapp366kutjebt7agcd\\\"\\n               ]\\n               \\\"phabricator:projects\\\": [],\\n             },\\n             \\\"branch\\\": \\\"default\\\",\\n             \\\"ccs\\\": [],\\n             \\\"commits\\\": [],\\n             \\\"dateCreated\\\": \\\"1499181406\\\",\\n             \\\"dateModified\\\": \\\"1499182103\\\",\\n             \\\"diffs\\\": [\\n               \\\"3\\\",\\n               \\\"4\\\",\\n             ],\\n             \\\"hashes\\\": [],\\n             \\\"id\\\": \\\"2\\\",\\n             \\\"lineCount\\\": \\\"2\\\",\\n             \\\"phid\\\": \\\"PHID-DREV-672qvysjcczopag46qty\\\",\\n             \\\"properties\\\": {},\\n             \\\"repositoryPHID\\\": \\\"PHID-REPO-hub2hx62ieuqeheznasv\\\",\\n             \\\"reviewers\\\": [],\\n             \\\"sourcePath\\\": null\\n             \\\"status\\\": \\\"0\\\",\\n             \\\"statusName\\\": \\\"Needs Review\\\",\\n             \\\"summary\\\": \\\"\\\",\\n             \\\"testPlan\\\": \\\"\\\",\\n             \\\"title\\\": \\\"example\\\",\\n             \\\"uri\\\": \\\"https:\\/\\/phab.example.com\\/D2\\\",\\n         }\\n     \\\"\\\"\\\"\\n     # TODO: replace differential.query and differential.querydiffs with\\n     # differential.diff.search because the former (and their output) are\\n     # frozen, and planned to be deprecated and removed.\\n \\n     def fetch(params):\\n         \\\"\\\"\\\"params -\\u003e single drev or None\\\"\\\"\\\"\\n         key = (params.get(b'ids') or params.get(b'phids') or [None])[0]\\n         if key in prefetched:\\n             return prefetched[key]\\n-        drevs = callconduit(repo.ui, b'differential.query', params)\\n+        drevs = callconduit(ui, b'differential.query', params)\\n         # Fill prefetched with the result\\n         for drev in drevs:\\n             prefetched[drev[b'phid']] = drev\\n             prefetched[int(drev[b'id'])] = drev\\n         if key not in prefetched:\\n             raise error.Abort(\\n                 _(b'cannot get Differential Revision %r') % params\\n   
          )\\n         return prefetched[key]\\n \\n     def getstack(topdrevids):\\n         \\\"\\\"\\\"given a top, get a stack from the bottom, [id] -\\u003e [id]\\\"\\\"\\\"\\n         visited = set()\\n         result = []\\n         queue = [{b'ids': [i]} for i in topdrevids]\\n         while queue:\\n             params = queue.pop()\\n             drev = fetch(params)\\n             if drev[b'id'] in visited:\\n                 continue\\n             visited.add(drev[b'id'])\\n             result.append(int(drev[b'id']))\\n             auxiliary = drev.get(b'auxiliary', {})\\n             depends = auxiliary.get(b'phabricator:depends-on', [])\\n             for phid in depends:\\n                 queue.append({b'phids': [phid]})\\n         result.reverse()\\n         return smartset.baseset(result)\\n \\n     # Initialize prefetch cache\\n     prefetched = {}  # {id or phid: drev}\\n \\n     tree = _parse(spec)\\n     drevs, ancestordrevs = _prefetchdrevs(tree)\\n \\n     # developer config: phabricator.batchsize\\n-    batchsize = repo.ui.configint(b'phabricator', b'batchsize')\\n+    batchsize = ui.configint(b'phabricator', b'batchsize')\\n \\n     # Prefetch Differential Revisions in batch\\n     tofetch = set(drevs)\\n     for r in ancestordrevs:\\n         tofetch.update(range(max(1, r - batchsize), r + 1))\\n     if drevs:\\n         fetch({b'ids': list(tofetch)})\\n     validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))\\n \\n     # Walk through the tree, return smartsets\\n     def walk(tree):\\n         op = tree[0]\\n         if op == b'symbol':\\n             drev = _parsedrev(tree[1])\\n             if drev:\\n                 return smartset.baseset([drev])\\n             elif tree[1] in _knownstatusnames:\\n                 drevs = [\\n                     r\\n                     for r in validids\\n                     if _getstatusname(prefetched[r]) == tree[1]\\n                 ]\\n                 return smartset.baseset(drevs)\\n             else:\\n                 raise error.Abort(_(b'unknown symbol: %s') % tree[1])\\n         elif op in {b'and_', b'add', b'sub'}:\\n             assert len(tree) == 3\\n             return getattr(operator, op)(walk(tree[1]), walk(tree[2]))\\n         elif op == b'group':\\n             return walk(tree[1])\\n         elif op == b'ancestors':\\n             return getstack(walk(tree[1]))\\n         else:\\n             raise error.ProgrammingError(b'illegal tree: %r' % tree)\\n \\n     return [prefetched[r] for r in walk(tree)]\\n \\n \\n def getdescfromdrev(drev):\\n     \\\"\\\"\\\"get description (commit message) from \\\"Differential Revision\\\"\\n \\n     This is similar to differential.getcommitmessage API. 
But we only care\\n     about limited fields: title, summary, test plan, and URL.\\n     \\\"\\\"\\\"\\n     title = drev[b'title']\\n     summary = drev[b'summary'].rstrip()\\n     testplan = drev[b'testPlan'].rstrip()\\n     if testplan:\\n         testplan = b'Test Plan:\\\\n%s' % testplan\\n     uri = b'Differential Revision: %s' % drev[b'uri']\\n     return b'\\\\n\\\\n'.join(filter(None, [title, summary, testplan, uri]))\\n \\n \\n def getdiffmeta(diff):\\n     \\\"\\\"\\\"get commit metadata (date, node, user, p1) from a diff object\\n \\n     The metadata could be \\\"hg:meta\\\", sent by phabsend, like:\\n \\n         \\\"properties\\\": {\\n           \\\"hg:meta\\\": {\\n             \\\"branch\\\": \\\"default\\\",\\n             \\\"date\\\": \\\"1499571514 25200\\\",\\n             \\\"node\\\": \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\",\\n             \\\"user\\\": \\\"Foo Bar \\u003cfoo@example.com\\u003e\\\",\\n             \\\"parent\\\": \\\"6d0abad76b30e4724a37ab8721d630394070fe16\\\"\\n           }\\n         }\\n \\n     Or converted from \\\"local:commits\\\", sent by \\\"arc\\\", like:\\n \\n         \\\"properties\\\": {\\n           \\\"local:commits\\\": {\\n             \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\": {\\n               \\\"author\\\": \\\"Foo Bar\\\",\\n               \\\"authorEmail\\\": \\\"foo@example.com\\\"\\n               \\\"branch\\\": \\\"default\\\",\\n               \\\"commit\\\": \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\",\\n               \\\"local\\\": \\\"1000\\\",\\n               \\\"message\\\": \\\"...\\\",\\n               \\\"parents\\\": [\\\"6d0abad76b30e4724a37ab8721d630394070fe16\\\"],\\n               \\\"rev\\\": \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\",\\n               \\\"summary\\\": \\\"...\\\",\\n               \\\"tag\\\": \\\"\\\",\\n               \\\"time\\\": 1499546314,\\n             }\\n           }\\n         }\\n \\n     Note: metadata extracted from \\\"local:commits\\\" will lose time zone\\n     information.\\n     \\\"\\\"\\\"\\n     props = diff.get(b'properties') or {}\\n     meta = props.get(b'hg:meta')\\n     if not meta:\\n         if props.get(b'local:commits'):\\n             commit = sorted(props[b'local:commits'].values())[0]\\n             meta = {}\\n             if b'author' in commit and b'authorEmail' in commit:\\n                 meta[b'user'] = b'%s \\u003c%s\\u003e' % (\\n                     commit[b'author'],\\n                     commit[b'authorEmail'],\\n                 )\\n             if b'time' in commit:\\n                 meta[b'date'] = b'%d 0' % int(commit[b'time'])\\n             if b'branch' in commit:\\n                 meta[b'branch'] = commit[b'branch']\\n             node = commit.get(b'commit', commit.get(b'rev'))\\n             if node:\\n                 meta[b'node'] = node\\n             if len(commit.get(b'parents', ())) \\u003e= 1:\\n                 meta[b'parent'] = commit[b'parents'][0]\\n         else:\\n             meta = {}\\n     if b'date' not in meta and b'dateCreated' in diff:\\n         meta[b'date'] = b'%s 0' % diff[b'dateCreated']\\n     if b'branch' not in meta and diff.get(b'branch'):\\n         meta[b'branch'] = diff[b'branch']\\n     if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):\\n         meta[b'parent'] = diff[b'sourceControlBaseRevision']\\n     return meta\\n \\n \\n def readpatch(ui, drevs, write):\\n     \\\"\\\"\\\"generate plain-text patch readable by 'hg import'\\n \\n     write is 
usually ui.write. drevs is what \\\"querydrev\\\" returns, results of\\n     \\\"differential.query\\\".\\n     \\\"\\\"\\\"\\n     # Prefetch hg:meta property for all diffs\\n     diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))\\n     diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})\\n \\n     # Generate patch for each drev\\n     for drev in drevs:\\n         ui.note(_(b'reading D%s\\\\n') % drev[b'id'])\\n \\n         diffid = max(int(v) for v in drev[b'diffs'])\\n         body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})\\n         desc = getdescfromdrev(drev)\\n         header = b'# HG changeset patch\\\\n'\\n \\n         # Try to preserve metadata from hg:meta property. Write hg patch\\n         # headers that can be read by the \\\"import\\\" command. See patchheadermap\\n         # and extract in mercurial\\/patch.py for supported headers.\\n         meta = getdiffmeta(diffs[b'%d' % diffid])\\n         for k in _metanamemap.keys():\\n             if k in meta:\\n                 header += b'# %s %s\\\\n' % (_metanamemap[k], meta[k])\\n \\n         content = b'%s%s\\\\n%s' % (header, desc, body)\\n         write(content)\\n \\n \\n @vcrcommand(\\n     b'phabread',\\n     [(b'', b'stack', False, _(b'read dependencies'))],\\n     _(b'DREVSPEC [OPTIONS]'),\\n     helpcategory=command.CATEGORY_IMPORT_EXPORT,\\n )\\n def phabread(ui, repo, spec, **opts):\\n     \\\"\\\"\\\"print patches from Phabricator suitable for importing\\n \\n     DREVSPEC could be a Differential Revision identity, like ``D123``, or just\\n     the number ``123``. It could also have common operators like ``+``, ``-``,\\n     ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to\\n     select a stack.\\n \\n     ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``\\n     could be used to filter patches by status. For performance reason, they\\n     only represent a subset of non-status selections and cannot be used alone.\\n \\n     For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude\\n     D2 and D4. ``:D9 & needsreview`` selects \\\"Needs Review\\\" revisions in a\\n     stack up to D9.\\n \\n     If --stack is given, follow dependencies information and read all patches.\\n     It is equivalent to the ``:`` operator.\\n     \\\"\\\"\\\"\\n     opts = pycompat.byteskwargs(opts)\\n     if opts.get(b'stack'):\\n         spec = b':(%s)' % spec\\n-    drevs = querydrev(repo, spec)\\n+    drevs = querydrev(repo.ui, spec)\\n     readpatch(repo.ui, drevs, ui.write)\\n \\n \\n @vcrcommand(\\n     b'phabupdate',\\n     [\\n         (b'', b'accept', False, _(b'accept revisions')),\\n         (b'', b'reject', False, _(b'reject revisions')),\\n         (b'', b'abandon', False, _(b'abandon revisions')),\\n         (b'', b'reclaim', False, _(b'reclaim revisions')),\\n         (b'm', b'comment', b'', _(b'comment on the last revision')),\\n     ],\\n     _(b'DREVSPEC [OPTIONS]'),\\n     helpcategory=command.CATEGORY_IMPORT_EXPORT,\\n )\\n def phabupdate(ui, repo, spec, **opts):\\n     \\\"\\\"\\\"update Differential Revision in batch\\n \\n     DREVSPEC selects revisions. 
See :hg:`help phabread` for its usage.\\n     \\\"\\\"\\\"\\n     opts = pycompat.byteskwargs(opts)\\n     flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]\\n     if len(flags) \\u003e 1:\\n         raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))\\n \\n     actions = []\\n     for f in flags:\\n         actions.append({b'type': f, b'value': True})\\n \\n-    drevs = querydrev(repo, spec)\\n+    drevs = querydrev(repo.ui, spec)\\n     for i, drev in enumerate(drevs):\\n         if i + 1 == len(drevs) and opts.get(b'comment'):\\n             actions.append({b'type': b'comment', b'value': opts[b'comment']})\\n         if actions:\\n             params = {\\n                 b'objectIdentifier': drev[b'phid'],\\n                 b'transactions': actions,\\n             }\\n             callconduit(ui, b'differential.revision.edit', params)\\n \\n \\n @eh.templatekeyword(b'phabreview', requires={b'ctx'})\\n def template_review(context, mapping):\\n     \\\"\\\"\\\":phabreview: Object describing the review for this changeset.\\n     Has attributes `url` and `id`.\\n     \\\"\\\"\\\"\\n     ctx = context.resource(mapping, b'ctx')\\n     m = _differentialrevisiondescre.search(ctx.description())\\n     if m:\\n         return templateutil.hybriddict(\\n             {b'url': m.group('url'), b'id': b\\\"D%s\\\" % m.group('id'),}\\n         )\\n     else:\\n         tags = ctx.repo().nodetags(ctx.node())\\n         for t in tags:\\n             if _differentialrevisiontagre.match(t):\\n                 url = ctx.repo().ui.config(b'phabricator', b'url')\\n                 if not url.endswith(b'\\/'):\\n                     url += b'\\/'\\n                 url += t\\n \\n                 return templateutil.hybriddict({b'url': url, b'id': t,})\\n     return None\\n \\n \\n @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})\\n def template_status(context, mapping):\\n     \\\"\\\"\\\":phabstatus: String. 
Status of Phabricator differential.\\n     \\\"\\\"\\\"\\n     ctx = context.resource(mapping, b'ctx')\\n     repo = context.resource(mapping, b'repo')\\n     ui = context.resource(mapping, b'ui')\\n \\n     rev = ctx.rev()\\n     try:\\n         drevid = getdrevmap(repo, [rev])[rev]\\n     except KeyError:\\n         return None\\n     drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})\\n     for drev in drevs:\\n         if int(drev[b'id']) == drevid:\\n             return templateutil.hybriddict(\\n                 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}\\n             )\\n     return None\\n \\n \\n @show.showview(b'phabstatus', csettopic=b'work')\\n def phabstatusshowview(ui, repo, displayer):\\n     \\\"\\\"\\\"Phabricator differiential status\\\"\\\"\\\"\\n     revs = repo.revs('sort(_underway(), topo)')\\n     drevmap = getdrevmap(repo, revs)\\n     unknownrevs, drevids, revsbydrevid = [], set([]), {}\\n     for rev, drevid in pycompat.iteritems(drevmap):\\n         if drevid is not None:\\n             drevids.add(drevid)\\n             revsbydrevid.setdefault(drevid, set([])).add(rev)\\n         else:\\n             unknownrevs.append(rev)\\n \\n     drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})\\n     drevsbyrev = {}\\n     for drev in drevs:\\n         for rev in revsbydrevid[int(drev[b'id'])]:\\n             drevsbyrev[rev] = drev\\n \\n     def phabstatus(ctx):\\n         drev = drevsbyrev[ctx.rev()]\\n         status = ui.label(\\n             b'%(statusName)s' % drev,\\n             b'phabricator.status.%s' % _getstatusname(drev),\\n         )\\n         ui.write(b\\\"\\\\n%s %s\\\\n\\\" % (drev[b'uri'], status))\\n \\n     revs -= smartset.baseset(unknownrevs)\\n     revdag = graphmod.dagwalker(repo, revs)\\n \\n     ui.setconfig(b'experimental', b'graphshorten', True)\\n     displayer._exthook = phabstatus\\n     nodelen = show.longestshortest(repo, revs)\\n     logcmdutil.displaygraph(\\n         ui,\\n         repo,\\n         revdag,\\n         displayer,\\n         graphmod.asciiedges,\\n         props={b'nodelen': nodelen},\\n     )\\n\"}]}],\"properties\":[]},\"20441\":{\"id\":\"20441\",\"revisionID\":\"8205\",\"dateCreated\":\"1581962476\",\"dateModified\":\"1583327820\",\"sourceControlBaseRevision\":\"69392460f7b1adf37a2b1206af8a5eb8ca0828f1\",\"sourceControlPath\":null,\"sourceControlSystem\":\"hg\",\"branch\":null,\"bookmark\":null,\"creationMethod\":\"commit\",\"description\":\"rHG72c6190de577bb2bb448eb2b14121e4ef85d08ff\",\"unitStatus\":\"6\",\"lintStatus\":\"6\",\"changes\":[{\"id\":\"55596\",\"metadata\":{\"line:first\":1612,\"hash.effect\":\"8g80k6H5tv6i\"},\"oldPath\":\"hgext\\/phabricator.py\",\"currentPath\":\"hgext\\/phabricator.py\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"5\",\"delLines\":\"7\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1801\",\"newLength\":\"1799\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\" # phabricator.py - simple Phabricator integration\\n #\\n # Copyright 2017 Facebook, Inc.\\n #\\n # This software may be used and distributed according to the terms of the\\n # GNU General Public License version 2 or any later version.\\n \\\"\\\"\\\"simple Phabricator integration (EXPERIMENTAL)\\n \\n This extension provides a ``phabsend`` command which sends a stack of\\n changesets to Phabricator, and a 
``phabread`` command which prints a stack of\\n revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command\\n to update statuses in batch.\\n \\n A \\\"phabstatus\\\" view for :hg:`show` is also provided; it displays status\\n information of Phabricator differentials associated with unfinished\\n changesets.\\n \\n By default, Phabricator requires ``Test Plan`` which might prevent some\\n changeset from being sent. The requirement could be disabled by changing\\n ``differential.require-test-plan-field`` config server side.\\n \\n Config::\\n \\n     [phabricator]\\n     # Phabricator URL\\n     url = https:\\/\\/phab.example.com\\/\\n \\n     # Repo callsign. If a repo has a URL https:\\/\\/$HOST\\/diffusion\\/FOO, then its\\n     # callsign is \\\"FOO\\\".\\n     callsign = FOO\\n \\n     # curl command to use. If not set (default), use builtin HTTP library to\\n     # communicate. If set, use the specified curl command. This could be useful\\n     # if you need to specify advanced options that is not easily supported by\\n     # the internal library.\\n     curlcmd = curl --connect-timeout 2 --retry 3 --silent\\n \\n     [auth]\\n     example.schemes = https\\n     example.prefix = phab.example.com\\n \\n     # API token. Get it from https:\\/\\/$HOST\\/conduit\\/login\\/\\n     example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx\\n \\\"\\\"\\\"\\n \\n from __future__ import absolute_import\\n \\n import base64\\n import contextlib\\n import hashlib\\n import itertools\\n import json\\n import mimetypes\\n import operator\\n import re\\n \\n from mercurial.node import bin, nullid\\n from mercurial.i18n import _\\n from mercurial.pycompat import getattr\\n from mercurial.thirdparty import attr\\n from mercurial import (\\n     cmdutil,\\n     context,\\n     encoding,\\n     error,\\n     exthelper,\\n     graphmod,\\n     httpconnection as httpconnectionmod,\\n     localrepo,\\n     logcmdutil,\\n     match,\\n     mdiff,\\n     obsutil,\\n     parser,\\n     patch,\\n     phases,\\n     pycompat,\\n     scmutil,\\n     smartset,\\n     tags,\\n     templatefilters,\\n     templateutil,\\n     url as urlmod,\\n     util,\\n )\\n from mercurial.utils import (\\n     procutil,\\n     stringutil,\\n )\\n from . import show\\n \\n \\n # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for\\n # extensions which SHIP WITH MERCURIAL. 
Non-mainline extensions should\\n # be specifying the version(s) of Mercurial they are tested with, or\\n # leave the attribute unspecified.\\n testedwith = b'ships-with-hg-core'\\n \\n eh = exthelper.exthelper()\\n \\n cmdtable = eh.cmdtable\\n command = eh.command\\n configtable = eh.configtable\\n templatekeyword = eh.templatekeyword\\n uisetup = eh.finaluisetup\\n \\n # developer config: phabricator.batchsize\\n eh.configitem(\\n     b'phabricator', b'batchsize', default=12,\\n )\\n eh.configitem(\\n     b'phabricator', b'callsign', default=None,\\n )\\n eh.configitem(\\n     b'phabricator', b'curlcmd', default=None,\\n )\\n # developer config: phabricator.repophid\\n eh.configitem(\\n     b'phabricator', b'repophid', default=None,\\n )\\n eh.configitem(\\n     b'phabricator', b'url', default=None,\\n )\\n eh.configitem(\\n     b'phabsend', b'confirm', default=False,\\n )\\n \\n colortable = {\\n     b'phabricator.action.created': b'green',\\n     b'phabricator.action.skipped': b'magenta',\\n     b'phabricator.action.updated': b'magenta',\\n     b'phabricator.desc': b'',\\n     b'phabricator.drev': b'bold',\\n     b'phabricator.node': b'',\\n     b'phabricator.status.abandoned': b'magenta dim',\\n     b'phabricator.status.accepted': b'green bold',\\n     b'phabricator.status.closed': b'green',\\n     b'phabricator.status.needsreview': b'yellow',\\n     b'phabricator.status.needsrevision': b'red',\\n     b'phabricator.status.changesplanned': b'red',\\n }\\n \\n _VCR_FLAGS = [\\n     (\\n         b'',\\n         b'test-vcr',\\n         b'',\\n         _(\\n             b'Path to a vcr file. If nonexistent, will record a new vcr transcript'\\n             b', otherwise will mock all http requests using the specified vcr file.'\\n             b' (ADVANCED)'\\n         ),\\n     ),\\n ]\\n \\n \\n @eh.wrapfunction(localrepo, \\\"loadhgrc\\\")\\n def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):\\n     \\\"\\\"\\\"Load ``.arcconfig`` content into a ui instance on repository open.\\n     \\\"\\\"\\\"\\n     result = False\\n     arcconfig = {}\\n \\n     try:\\n         # json.loads only accepts bytes from 3.6+\\n         rawparams = encoding.unifromlocal(wdirvfs.read(b\\\".arcconfig\\\"))\\n         # json.loads only returns unicode strings\\n         arcconfig = pycompat.rapply(\\n             lambda x: encoding.unitolocal(x)\\n             if isinstance(x, pycompat.unicode)\\n             else x,\\n             pycompat.json_loads(rawparams),\\n         )\\n \\n         result = True\\n     except ValueError:\\n         ui.warn(_(b\\\"invalid JSON in %s\\\\n\\\") % wdirvfs.join(b\\\".arcconfig\\\"))\\n     except IOError:\\n         pass\\n \\n     cfg = util.sortdict()\\n \\n     if b\\\"repository.callsign\\\" in arcconfig:\\n         cfg[(b\\\"phabricator\\\", b\\\"callsign\\\")] = arcconfig[b\\\"repository.callsign\\\"]\\n \\n     if b\\\"phabricator.uri\\\" in arcconfig:\\n         cfg[(b\\\"phabricator\\\", b\\\"url\\\")] = arcconfig[b\\\"phabricator.uri\\\"]\\n \\n     if cfg:\\n         ui.applyconfig(cfg, source=wdirvfs.join(b\\\".arcconfig\\\"))\\n \\n     return orig(ui, wdirvfs, hgvfs, requirements) or result  # Load .hg\\/hgrc\\n \\n \\n def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):\\n     fullflags = flags + _VCR_FLAGS\\n \\n     def hgmatcher(r1, r2):\\n         if r1.uri != r2.uri or r1.method != r2.method:\\n             return False\\n         r1params = util.urlreq.parseqs(r1.body)\\n         r2params = util.urlreq.parseqs(r2.body)\\n     
    for key in r1params:\\n             if key not in r2params:\\n                 return False\\n             value = r1params[key][0]\\n             # we want to compare json payloads without worrying about ordering\\n             if value.startswith(b'{') and value.endswith(b'}'):\\n                 r1json = pycompat.json_loads(value)\\n                 r2json = pycompat.json_loads(r2params[key][0])\\n                 if r1json != r2json:\\n                     return False\\n             elif r2params[key][0] != value:\\n                 return False\\n         return True\\n \\n     def sanitiserequest(request):\\n         request.body = re.sub(\\n             br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body\\n         )\\n         return request\\n \\n     def sanitiseresponse(response):\\n         if 'set-cookie' in response['headers']:\\n             del response['headers']['set-cookie']\\n         return response\\n \\n     def decorate(fn):\\n         def inner(*args, **kwargs):\\n             cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))\\n             if cassette:\\n                 import hgdemandimport\\n \\n                 with hgdemandimport.deactivated():\\n                     import vcr as vcrmod\\n                     import vcr.stubs as stubs\\n \\n                     vcr = vcrmod.VCR(\\n                         serializer='json',\\n                         before_record_request=sanitiserequest,\\n                         before_record_response=sanitiseresponse,\\n                         custom_patches=[\\n                             (\\n                                 urlmod,\\n                                 'httpconnection',\\n                                 stubs.VCRHTTPConnection,\\n                             ),\\n                             (\\n                                 urlmod,\\n                                 'httpsconnection',\\n                                 stubs.VCRHTTPSConnection,\\n                             ),\\n                         ],\\n                     )\\n                     vcr.register_matcher('hgmatcher', hgmatcher)\\n                     with vcr.use_cassette(cassette, match_on=['hgmatcher']):\\n                         return fn(*args, **kwargs)\\n             return fn(*args, **kwargs)\\n \\n         inner.__name__ = fn.__name__\\n         inner.__doc__ = fn.__doc__\\n         return command(\\n             name,\\n             fullflags,\\n             spec,\\n             helpcategory=helpcategory,\\n             optionalrepo=optionalrepo,\\n         )(inner)\\n \\n     return decorate\\n \\n \\n def urlencodenested(params):\\n     \\\"\\\"\\\"like urlencode, but works with nested parameters.\\n \\n     For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be\\n     flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to\\n     urlencode. 
Note: the encoding is consistent with PHP's http_build_query.\\n     \\\"\\\"\\\"\\n     flatparams = util.sortdict()\\n \\n     def process(prefix, obj):\\n         if isinstance(obj, bool):\\n             obj = {True: b'true', False: b'false'}[obj]  # Python -\\u003e PHP form\\n         lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]\\n         items = {list: lister, dict: lambda x: x.items()}.get(type(obj))\\n         if items is None:\\n             flatparams[prefix] = obj\\n         else:\\n             for k, v in items(obj):\\n                 if prefix:\\n                     process(b'%s[%s]' % (prefix, k), v)\\n                 else:\\n                     process(k, v)\\n \\n     process(b'', params)\\n     return util.urlreq.urlencode(flatparams)\\n \\n \\n def readurltoken(ui):\\n     \\\"\\\"\\\"return conduit url, token and make sure they exist\\n \\n     Currently read from [auth] config section. In the future, it might\\n     make sense to read from .arcconfig and .arcrc as well.\\n     \\\"\\\"\\\"\\n     url = ui.config(b'phabricator', b'url')\\n     if not url:\\n         raise error.Abort(\\n             _(b'config %s.%s is required') % (b'phabricator', b'url')\\n         )\\n \\n     res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)\\n     token = None\\n \\n     if res:\\n         group, auth = res\\n \\n         ui.debug(b\\\"using auth.%s.* for authentication\\\\n\\\" % group)\\n \\n         token = auth.get(b'phabtoken')\\n \\n     if not token:\\n         raise error.Abort(\\n             _(b'Can\\\\'t find conduit token associated to %s') % (url,)\\n         )\\n \\n     return url, token\\n \\n \\n def callconduit(ui, name, params):\\n     \\\"\\\"\\\"call Conduit API, params is a dict. return json.loads result, or None\\\"\\\"\\\"\\n     host, token = readurltoken(ui)\\n     url, authinfo = util.url(b'\\/'.join([host, b'api', name])).authinfo()\\n     ui.debug(b'Conduit Call: %s %s\\\\n' % (url, pycompat.byterepr(params)))\\n     params = params.copy()\\n     params[b'__conduit__'] = {\\n         b'token': token,\\n     }\\n     rawdata = {\\n         b'params': templatefilters.json(params),\\n         b'output': b'json',\\n         b'__conduit__': 1,\\n     }\\n     data = urlencodenested(rawdata)\\n     curlcmd = ui.config(b'phabricator', b'curlcmd')\\n     if curlcmd:\\n         sin, sout = procutil.popen2(\\n             b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))\\n         )\\n         sin.write(data)\\n         sin.close()\\n         body = sout.read()\\n     else:\\n         urlopener = urlmod.opener(ui, authinfo)\\n         request = util.urlreq.request(pycompat.strurl(url), data=data)\\n         with contextlib.closing(urlopener.open(request)) as rsp:\\n             body = rsp.read()\\n     ui.debug(b'Conduit Response: %s\\\\n' % body)\\n     parsed = pycompat.rapply(\\n         lambda x: encoding.unitolocal(x)\\n         if isinstance(x, pycompat.unicode)\\n         else x,\\n         # json.loads only accepts bytes from py3.6+\\n         pycompat.json_loads(encoding.unifromlocal(body)),\\n     )\\n     if parsed.get(b'error_code'):\\n         msg = _(b'Conduit Error (%s): %s') % (\\n             parsed[b'error_code'],\\n             parsed[b'error_info'],\\n         )\\n         raise error.Abort(msg)\\n     return parsed[b'result']\\n \\n \\n @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)\\n def debugcallconduit(ui, repo, name):\\n     \\\"\\\"\\\"call Conduit API\\n \\n     Call 
parameters are read from stdin as a JSON blob. Result will be written\\n     to stdout as a JSON blob.\\n     \\\"\\\"\\\"\\n     # json.loads only accepts bytes from 3.6+\\n     rawparams = encoding.unifromlocal(ui.fin.read())\\n     # json.loads only returns unicode strings\\n     params = pycompat.rapply(\\n         lambda x: encoding.unitolocal(x)\\n         if isinstance(x, pycompat.unicode)\\n         else x,\\n         pycompat.json_loads(rawparams),\\n     )\\n     # json.dumps only accepts unicode strings\\n     result = pycompat.rapply(\\n         lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,\\n         callconduit(ui, name, params),\\n     )\\n     s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))\\n     ui.write(b'%s\\\\n' % encoding.unitolocal(s))\\n \\n \\n def getrepophid(repo):\\n     \\\"\\\"\\\"given callsign, return repository PHID or None\\\"\\\"\\\"\\n     # developer config: phabricator.repophid\\n     repophid = repo.ui.config(b'phabricator', b'repophid')\\n     if repophid:\\n         return repophid\\n     callsign = repo.ui.config(b'phabricator', b'callsign')\\n     if not callsign:\\n         return None\\n     query = callconduit(\\n         repo.ui,\\n         b'diffusion.repository.search',\\n         {b'constraints': {b'callsigns': [callsign]}},\\n     )\\n     if len(query[b'data']) == 0:\\n         return None\\n     repophid = query[b'data'][0][b'phid']\\n     repo.ui.setconfig(b'phabricator', b'repophid', repophid)\\n     return repophid\\n \\n \\n _differentialrevisiontagre = re.compile(br'\\\\AD([1-9][0-9]*)\\\\Z')\\n _differentialrevisiondescre = re.compile(\\n     br'^Differential Revision:\\\\s*(?P\\u003curl\\u003e(?:.*)D(?P\\u003cid\\u003e[1-9][0-9]*))$', re.M\\n )\\n \\n \\n def getoldnodedrevmap(repo, nodelist):\\n     \\\"\\\"\\\"find previous nodes that has been sent to Phabricator\\n \\n     return {node: (oldnode, Differential diff, Differential Revision ID)}\\n     for node in nodelist with known previous sent versions, or associated\\n     Differential Revision IDs. ``oldnode`` and ``Differential diff`` could\\n     be ``None``.\\n \\n     Examines commit messages like \\\"Differential Revision:\\\" to get the\\n     association information.\\n \\n     If such commit message line is not found, examines all precursors and their\\n     tags. Tags with format like \\\"D1234\\\" are considered a match and the node\\n     with that tag, and the number after \\\"D\\\" (ex. 
1234) will be returned.\\n \\n     The ``old node``, if not None, is guaranteed to be the last diff of\\n     corresponding Differential Revision, and exist in the repo.\\n     \\\"\\\"\\\"\\n     unfi = repo.unfiltered()\\n     has_node = unfi.changelog.index.has_node\\n \\n     result = {}  # {node: (oldnode?, lastdiff?, drev)}\\n     toconfirm = {}  # {node: (force, {precnode}, drev)}\\n     for node in nodelist:\\n         ctx = unfi[node]\\n         # For tags like \\\"D123\\\", put them into \\\"toconfirm\\\" to verify later\\n         precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))\\n         for n in precnodes:\\n             if has_node(n):\\n                 for tag in unfi.nodetags(n):\\n                     m = _differentialrevisiontagre.match(tag)\\n                     if m:\\n                         toconfirm[node] = (0, set(precnodes), int(m.group(1)))\\n                         break\\n                 else:\\n                     continue  # move to next predecessor\\n                 break  # found a tag, stop\\n         else:\\n             # Check commit message\\n             m = _differentialrevisiondescre.search(ctx.description())\\n             if m:\\n                 toconfirm[node] = (1, set(precnodes), int(m.group('id')))\\n \\n     # Double check if tags are genuine by collecting all old nodes from\\n     # Phabricator, and expect precursors overlap with it.\\n     if toconfirm:\\n         drevs = [drev for force, precs, drev in toconfirm.values()]\\n         alldiffs = callconduit(\\n             unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}\\n         )\\n         getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None\\n         for newnode, (force, precset, drev) in toconfirm.items():\\n             diffs = [\\n                 d for d in alldiffs.values() if int(d[b'revisionID']) == drev\\n             ]\\n \\n             # \\\"precursors\\\" as known by Phabricator\\n             phprecset = set(getnode(d) for d in diffs)\\n \\n             # Ignore if precursors (Phabricator and local repo) do not overlap,\\n             # and force is not set (when commit message says nothing)\\n             if not force and not bool(phprecset & precset):\\n                 tagname = b'D%d' % drev\\n                 tags.tag(\\n                     repo,\\n                     tagname,\\n                     nullid,\\n                     message=None,\\n                     user=None,\\n                     date=None,\\n                     local=True,\\n                 )\\n                 unfi.ui.warn(\\n                     _(\\n                         b'D%d: local tag removed - does not match '\\n                         b'Differential history\\\\n'\\n                     )\\n                     % drev\\n                 )\\n                 continue\\n \\n             # Find the last node using Phabricator metadata, and make sure it\\n             # exists in the repo\\n             oldnode = lastdiff = None\\n             if diffs:\\n                 lastdiff = max(diffs, key=lambda d: int(d[b'id']))\\n                 oldnode = getnode(lastdiff)\\n                 if oldnode and not has_node(oldnode):\\n                     oldnode = None\\n \\n             result[newnode] = (oldnode, lastdiff, drev)\\n \\n     return result\\n \\n \\n def getdrevmap(repo, revs):\\n     \\\"\\\"\\\"Return a dict mapping each rev in `revs` to their Differential Revision\\n     ID or None.\\n     \\\"\\\"\\\"\\n     result = {}\\n    
 for rev in revs:\\n         result[rev] = None\\n         ctx = repo[rev]\\n         # Check commit message\\n         m = _differentialrevisiondescre.search(ctx.description())\\n         if m:\\n             result[rev] = int(m.group('id'))\\n             continue\\n         # Check tags\\n         for tag in repo.nodetags(ctx.node()):\\n             m = _differentialrevisiontagre.match(tag)\\n             if m:\\n                 result[rev] = int(m.group(1))\\n                 break\\n \\n     return result\\n \\n \\n def getdiff(ctx, diffopts):\\n     \\\"\\\"\\\"plain-text diff without header (user, commit message, etc)\\\"\\\"\\\"\\n     output = util.stringio()\\n     for chunk, _label in patch.diffui(\\n         ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts\\n     ):\\n         output.write(chunk)\\n     return output.getvalue()\\n \\n \\n class DiffChangeType(object):\\n     ADD = 1\\n     CHANGE = 2\\n     DELETE = 3\\n     MOVE_AWAY = 4\\n     COPY_AWAY = 5\\n     MOVE_HERE = 6\\n     COPY_HERE = 7\\n     MULTICOPY = 8\\n \\n \\n class DiffFileType(object):\\n     TEXT = 1\\n     IMAGE = 2\\n     BINARY = 3\\n \\n \\n @attr.s\\n class phabhunk(dict):\\n     \\\"\\\"\\\"Represents a Differential hunk, which is owned by a Differential change\\n     \\\"\\\"\\\"\\n \\n     oldOffset = attr.ib(default=0)  # camelcase-required\\n     oldLength = attr.ib(default=0)  # camelcase-required\\n     newOffset = attr.ib(default=0)  # camelcase-required\\n     newLength = attr.ib(default=0)  # camelcase-required\\n     corpus = attr.ib(default='')\\n     # These get added to the phabchange's equivalents\\n     addLines = attr.ib(default=0)  # camelcase-required\\n     delLines = attr.ib(default=0)  # camelcase-required\\n \\n \\n @attr.s\\n class phabchange(object):\\n     \\\"\\\"\\\"Represents a Differential change, owns Differential hunks and owned by a\\n     Differential diff.  Each one represents one file in a diff.\\n     \\\"\\\"\\\"\\n \\n     currentPath = attr.ib(default=None)  # camelcase-required\\n     oldPath = attr.ib(default=None)  # camelcase-required\\n     awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required\\n     metadata = attr.ib(default=attr.Factory(dict))\\n     oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required\\n     newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required\\n     type = attr.ib(default=DiffChangeType.CHANGE)\\n     fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required\\n     commitHash = attr.ib(default=None)  # camelcase-required\\n     addLines = attr.ib(default=0)  # camelcase-required\\n     delLines = attr.ib(default=0)  # camelcase-required\\n     hunks = attr.ib(default=attr.Factory(list))\\n \\n     def copynewmetadatatoold(self):\\n         for key in list(self.metadata.keys()):\\n             newkey = key.replace(b'new:', b'old:')\\n             self.metadata[newkey] = self.metadata[key]\\n \\n     def addoldmode(self, value):\\n         self.oldProperties[b'unix:filemode'] = value\\n \\n     def addnewmode(self, value):\\n         self.newProperties[b'unix:filemode'] = value\\n \\n     def addhunk(self, hunk):\\n         if not isinstance(hunk, phabhunk):\\n             raise error.Abort(b'phabchange.addhunk only takes phabhunks')\\n         self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))\\n         # It's useful to include these stats since the Phab web UI shows them,\\n         # and uses them to estimate how large a change a Revision is. 
Also used\\n         # in email subjects for the [+++--] bit.\\n         self.addLines += hunk.addLines\\n         self.delLines += hunk.delLines\\n \\n \\n @attr.s\\n class phabdiff(object):\\n     \\\"\\\"\\\"Represents a Differential diff, owns Differential changes.  Corresponds\\n     to a commit.\\n     \\\"\\\"\\\"\\n \\n     # Doesn't seem to be any reason to send this (output of uname -n)\\n     sourceMachine = attr.ib(default=b'')  # camelcase-required\\n     sourcePath = attr.ib(default=b'\\/')  # camelcase-required\\n     sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required\\n     sourceControlPath = attr.ib(default=b'\\/')  # camelcase-required\\n     sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required\\n     branch = attr.ib(default=b'default')\\n     bookmark = attr.ib(default=None)\\n     creationMethod = attr.ib(default=b'phabsend')  # camelcase-required\\n     lintStatus = attr.ib(default=b'none')  # camelcase-required\\n     unitStatus = attr.ib(default=b'none')  # camelcase-required\\n     changes = attr.ib(default=attr.Factory(dict))\\n     repositoryPHID = attr.ib(default=None)  # camelcase-required\\n \\n     def addchange(self, change):\\n         if not isinstance(change, phabchange):\\n             raise error.Abort(b'phabdiff.addchange only takes phabchanges')\\n         self.changes[change.currentPath] = pycompat.byteskwargs(\\n             attr.asdict(change)\\n         )\\n \\n \\n def maketext(pchange, ctx, fname):\\n     \\\"\\\"\\\"populate the phabchange for a text file\\\"\\\"\\\"\\n     repo = ctx.repo()\\n     fmatcher = match.exact([fname])\\n     diffopts = mdiff.diffopts(git=True, context=32767)\\n     _pfctx, _fctx, header, fhunks = next(\\n         patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)\\n     )\\n \\n     for fhunk in fhunks:\\n         (oldOffset, oldLength, newOffset, newLength), lines = fhunk\\n         corpus = b''.join(lines[1:])\\n         shunk = list(header)\\n         shunk.extend(lines)\\n         _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(\\n             patch.diffstatdata(util.iterlines(shunk))\\n         )\\n         pchange.addhunk(\\n             phabhunk(\\n                 oldOffset,\\n                 oldLength,\\n                 newOffset,\\n                 newLength,\\n                 corpus,\\n                 addLines,\\n                 delLines,\\n             )\\n         )\\n \\n \\n def uploadchunks(fctx, fphid):\\n     \\\"\\\"\\\"upload large binary files as separate chunks.\\n     Phab requests chunking over 8MiB, and splits into 4MiB chunks\\n     \\\"\\\"\\\"\\n     ui = fctx.repo().ui\\n     chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})\\n     with ui.makeprogress(\\n         _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)\\n     ) as progress:\\n         for chunk in chunks:\\n             progress.increment()\\n             if chunk[b'complete']:\\n                 continue\\n             bstart = int(chunk[b'byteStart'])\\n             bend = int(chunk[b'byteEnd'])\\n             callconduit(\\n                 ui,\\n                 b'file.uploadchunk',\\n                 {\\n                     b'filePHID': fphid,\\n                     b'byteStart': bstart,\\n                     b'data': base64.b64encode(fctx.data()[bstart:bend]),\\n                     b'dataEncoding': b'base64',\\n                 },\\n             )\\n \\n \\n def uploadfile(fctx):\\n     \\\"\\\"\\\"upload binary files to 
Phabricator\\\"\\\"\\\"\\n     repo = fctx.repo()\\n     ui = repo.ui\\n     fname = fctx.path()\\n     size = fctx.size()\\n     fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())\\n \\n     # an allocate call is required first to see if an upload is even required\\n     # (Phab might already have it) and to determine if chunking is needed\\n     allocateparams = {\\n         b'name': fname,\\n         b'contentLength': size,\\n         b'contentHash': fhash,\\n     }\\n     filealloc = callconduit(ui, b'file.allocate', allocateparams)\\n     fphid = filealloc[b'filePHID']\\n \\n     if filealloc[b'upload']:\\n         ui.write(_(b'uploading %s\\\\n') % bytes(fctx))\\n         if not fphid:\\n             uploadparams = {\\n                 b'name': fname,\\n                 b'data_base64': base64.b64encode(fctx.data()),\\n             }\\n             fphid = callconduit(ui, b'file.upload', uploadparams)\\n         else:\\n             uploadchunks(fctx, fphid)\\n     else:\\n         ui.debug(b'server already has %s\\\\n' % bytes(fctx))\\n \\n     if not fphid:\\n         raise error.Abort(b'Upload of %s failed.' % bytes(fctx))\\n \\n     return fphid\\n \\n \\n def addoldbinary(pchange, fctx):\\n     \\\"\\\"\\\"add the metadata for the previous version of a binary file to the\\n     phabchange for the new version\\n     \\\"\\\"\\\"\\n     oldfctx = fctx.p1()\\n     if fctx.cmp(oldfctx):\\n         # Files differ, add the old one\\n         pchange.metadata[b'old:file:size'] = oldfctx.size()\\n         mimeguess, _enc = mimetypes.guess_type(\\n             encoding.unifromlocal(oldfctx.path())\\n         )\\n         if mimeguess:\\n             pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(\\n                 mimeguess\\n             )\\n         fphid = uploadfile(oldfctx)\\n         pchange.metadata[b'old:binary-phid'] = fphid\\n     else:\\n         # If it's left as IMAGE\\/BINARY web UI might try to display it\\n         pchange.fileType = DiffFileType.TEXT\\n         pchange.copynewmetadatatoold()\\n \\n \\n def makebinary(pchange, fctx):\\n     \\\"\\\"\\\"populate the phabchange for a binary file\\\"\\\"\\\"\\n     pchange.fileType = DiffFileType.BINARY\\n     fphid = uploadfile(fctx)\\n     pchange.metadata[b'new:binary-phid'] = fphid\\n     pchange.metadata[b'new:file:size'] = fctx.size()\\n     mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))\\n     if mimeguess:\\n         mimeguess = pycompat.bytestr(mimeguess)\\n         pchange.metadata[b'new:file:mime-type'] = mimeguess\\n         if mimeguess.startswith(b'image\\/'):\\n             pchange.fileType = DiffFileType.IMAGE\\n \\n \\n # Copied from mercurial\\/patch.py\\n gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}\\n \\n \\n def notutf8(fctx):\\n     \\\"\\\"\\\"detect non-UTF-8 text files since Phabricator requires them to be marked\\n     as binary\\n     \\\"\\\"\\\"\\n     try:\\n         fctx.data().decode('utf-8')\\n         if fctx.parents():\\n             fctx.p1().data().decode('utf-8')\\n         return False\\n     except UnicodeDecodeError:\\n         fctx.repo().ui.write(\\n             _(b'file %s detected as non-UTF-8, marked as binary\\\\n')\\n             % fctx.path()\\n         )\\n         return True\\n \\n \\n def addremoved(pdiff, ctx, removed):\\n     \\\"\\\"\\\"add removed files to the phabdiff. 
Shouldn't include moves\\\"\\\"\\\"\\n     for fname in removed:\\n         pchange = phabchange(\\n             currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE\\n         )\\n         pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])\\n         fctx = ctx.p1()[fname]\\n         if not (fctx.isbinary() or notutf8(fctx)):\\n             maketext(pchange, ctx, fname)\\n \\n         pdiff.addchange(pchange)\\n \\n \\n def addmodified(pdiff, ctx, modified):\\n     \\\"\\\"\\\"add modified files to the phabdiff\\\"\\\"\\\"\\n     for fname in modified:\\n         fctx = ctx[fname]\\n         pchange = phabchange(currentPath=fname, oldPath=fname)\\n         filemode = gitmode[ctx[fname].flags()]\\n         originalmode = gitmode[ctx.p1()[fname].flags()]\\n         if filemode != originalmode:\\n             pchange.addoldmode(originalmode)\\n             pchange.addnewmode(filemode)\\n \\n         if fctx.isbinary() or notutf8(fctx):\\n             makebinary(pchange, fctx)\\n             addoldbinary(pchange, fctx)\\n         else:\\n             maketext(pchange, ctx, fname)\\n \\n         pdiff.addchange(pchange)\\n \\n \\n def addadded(pdiff, ctx, added, removed):\\n     \\\"\\\"\\\"add file adds to the phabdiff, both new files and copies\\/moves\\\"\\\"\\\"\\n     # Keep track of files that've been recorded as moved\\/copied, so if there are\\n     # additional copies we can mark them (moves get removed from removed)\\n     copiedchanges = {}\\n     movedchanges = {}\\n     for fname in added:\\n         fctx = ctx[fname]\\n         pchange = phabchange(currentPath=fname)\\n \\n         filemode = gitmode[ctx[fname].flags()]\\n         renamed = fctx.renamed()\\n \\n         if renamed:\\n             originalfname = renamed[0]\\n             originalmode = gitmode[ctx.p1()[originalfname].flags()]\\n             pchange.oldPath = originalfname\\n \\n             if originalfname in removed:\\n                 origpchange = phabchange(\\n                     currentPath=originalfname,\\n                     oldPath=originalfname,\\n                     type=DiffChangeType.MOVE_AWAY,\\n                     awayPaths=[fname],\\n                 )\\n                 movedchanges[originalfname] = origpchange\\n                 removed.remove(originalfname)\\n                 pchange.type = DiffChangeType.MOVE_HERE\\n             elif originalfname in movedchanges:\\n                 movedchanges[originalfname].type = DiffChangeType.MULTICOPY\\n                 movedchanges[originalfname].awayPaths.append(fname)\\n                 pchange.type = DiffChangeType.COPY_HERE\\n             else:  # pure copy\\n                 if originalfname not in copiedchanges:\\n                     origpchange = phabchange(\\n                         currentPath=originalfname, type=DiffChangeType.COPY_AWAY\\n                     )\\n                     copiedchanges[originalfname] = origpchange\\n                 else:\\n                     origpchange = copiedchanges[originalfname]\\n                 origpchange.awayPaths.append(fname)\\n                 pchange.type = DiffChangeType.COPY_HERE\\n \\n             if filemode != originalmode:\\n                 pchange.addoldmode(originalmode)\\n                 pchange.addnewmode(filemode)\\n         else:  # Brand-new file\\n             pchange.addnewmode(gitmode[fctx.flags()])\\n             pchange.type = DiffChangeType.ADD\\n \\n         if fctx.isbinary() or notutf8(fctx):\\n             makebinary(pchange, fctx)\\n             if 
renamed:\\n                 addoldbinary(pchange, fctx)\\n         else:\\n             maketext(pchange, ctx, fname)\\n \\n         pdiff.addchange(pchange)\\n \\n     for _path, copiedchange in copiedchanges.items():\\n         pdiff.addchange(copiedchange)\\n     for _path, movedchange in movedchanges.items():\\n         pdiff.addchange(movedchange)\\n \\n \\n def creatediff(ctx):\\n     \\\"\\\"\\\"create a Differential Diff\\\"\\\"\\\"\\n     repo = ctx.repo()\\n     repophid = getrepophid(repo)\\n     # Create a \\\"Differential Diff\\\" via \\\"differential.creatediff\\\" API\\n     pdiff = phabdiff(\\n         sourceControlBaseRevision=b'%s' % ctx.p1().hex(),\\n         branch=b'%s' % ctx.branch(),\\n     )\\n     modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)\\n     # addadded will remove moved files from removed, so addremoved won't get\\n     # them\\n     addadded(pdiff, ctx, added, removed)\\n     addmodified(pdiff, ctx, modified)\\n     addremoved(pdiff, ctx, removed)\\n     if repophid:\\n         pdiff.repositoryPHID = repophid\\n     diff = callconduit(\\n         repo.ui,\\n         b'differential.creatediff',\\n         pycompat.byteskwargs(attr.asdict(pdiff)),\\n     )\\n     if not diff:\\n         raise error.Abort(_(b'cannot create diff for %s') % ctx)\\n     return diff\\n \\n \\n def writediffproperties(ctx, diff):\\n     \\\"\\\"\\\"write metadata to diff so patches could be applied losslessly\\\"\\\"\\\"\\n     # creatediff returns with a diffid but query returns with an id\\n     diffid = diff.get(b'diffid', diff.get(b'id'))\\n     params = {\\n         b'diff_id': diffid,\\n         b'name': b'hg:meta',\\n         b'data': templatefilters.json(\\n             {\\n                 b'user': ctx.user(),\\n                 b'date': b'%d %d' % ctx.date(),\\n                 b'branch': ctx.branch(),\\n                 b'node': ctx.hex(),\\n                 b'parent': ctx.p1().hex(),\\n             }\\n         ),\\n     }\\n     callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)\\n \\n     params = {\\n         b'diff_id': diffid,\\n         b'name': b'local:commits',\\n         b'data': templatefilters.json(\\n             {\\n                 ctx.hex(): {\\n                     b'author': stringutil.person(ctx.user()),\\n                     b'authorEmail': stringutil.email(ctx.user()),\\n                     b'time': int(ctx.date()[0]),\\n                     b'commit': ctx.hex(),\\n                     b'parents': [ctx.p1().hex()],\\n                     b'branch': ctx.branch(),\\n                 },\\n             }\\n         ),\\n     }\\n     callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)\\n \\n \\n def createdifferentialrevision(\\n     ctx,\\n     revid=None,\\n     parentrevphid=None,\\n     oldnode=None,\\n     olddiff=None,\\n     actions=None,\\n     comment=None,\\n ):\\n     \\\"\\\"\\\"create or update a Differential Revision\\n \\n     If revid is None, create a new Differential Revision, otherwise update\\n     revid. 
If parentrevphid is not None, set it as a dependency.\\n \\n     If oldnode is not None, check if the patch content (without commit message\\n     and metadata) has changed before creating another diff.\\n \\n     If actions is not None, they will be appended to the transaction.\\n     \\\"\\\"\\\"\\n     repo = ctx.repo()\\n     if oldnode:\\n         diffopts = mdiff.diffopts(git=True, context=32767)\\n         oldctx = repo.unfiltered()[oldnode]\\n         neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)\\n     else:\\n         neednewdiff = True\\n \\n     transactions = []\\n     if neednewdiff:\\n         diff = creatediff(ctx)\\n         transactions.append({b'type': b'update', b'value': diff[b'phid']})\\n         if comment:\\n             transactions.append({b'type': b'comment', b'value': comment})\\n     else:\\n         # Even if we don't need to upload a new diff because the patch content\\n         # does not change. We might still need to update its metadata so\\n         # pushers could know the correct node metadata.\\n         assert olddiff\\n         diff = olddiff\\n     writediffproperties(ctx, diff)\\n \\n     # Set the parent Revision every time, so commit re-ordering is picked-up\\n     if parentrevphid:\\n         transactions.append(\\n             {b'type': b'parents.set', b'value': [parentrevphid]}\\n         )\\n \\n     if actions:\\n         transactions += actions\\n \\n     # Parse commit message and update related fields.\\n     desc = ctx.description()\\n     info = callconduit(\\n         repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}\\n     )\\n     for k, v in info[b'fields'].items():\\n         if k in [b'title', b'summary', b'testPlan']:\\n             transactions.append({b'type': k, b'value': v})\\n \\n     params = {b'transactions': transactions}\\n     if revid is not None:\\n         # Update an existing Differential Revision\\n         params[b'objectIdentifier'] = revid\\n \\n     revision = callconduit(repo.ui, b'differential.revision.edit', params)\\n     if not revision:\\n         raise error.Abort(_(b'cannot create revision for %s') % ctx)\\n \\n     return revision, diff\\n \\n \\n def userphids(repo, names):\\n     \\\"\\\"\\\"convert user names to PHIDs\\\"\\\"\\\"\\n     names = [name.lower() for name in names]\\n     query = {b'constraints': {b'usernames': names}}\\n     result = callconduit(repo.ui, b'user.search', query)\\n     # username not found is not an error of the API. 
So check if we have missed\\n     # some names here.\\n     data = result[b'data']\\n     resolved = set(entry[b'fields'][b'username'].lower() for entry in data)\\n     unresolved = set(names) - resolved\\n     if unresolved:\\n         raise error.Abort(\\n             _(b'unknown username: %s') % b' '.join(sorted(unresolved))\\n         )\\n     return [entry[b'phid'] for entry in data]\\n \\n \\n @vcrcommand(\\n     b'phabsend',\\n     [\\n         (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),\\n         (b'', b'amend', True, _(b'update commit messages')),\\n         (b'', b'reviewer', [], _(b'specify reviewers')),\\n         (b'', b'blocker', [], _(b'specify blocking reviewers')),\\n         (\\n             b'm',\\n             b'comment',\\n             b'',\\n             _(b'add a comment to Revisions with new\\/updated Diffs'),\\n         ),\\n         (b'', b'confirm', None, _(b'ask for confirmation before sending')),\\n     ],\\n     _(b'REV [OPTIONS]'),\\n     helpcategory=command.CATEGORY_IMPORT_EXPORT,\\n )\\n def phabsend(ui, repo, *revs, **opts):\\n     \\\"\\\"\\\"upload changesets to Phabricator\\n \\n     If there are multiple revisions specified, they will be send as a stack\\n     with a linear dependencies relationship using the order specified by the\\n     revset.\\n \\n     For the first time uploading changesets, local tags will be created to\\n     maintain the association. After the first time, phabsend will check\\n     obsstore and tags information so it can figure out whether to update an\\n     existing Differential Revision, or create a new one.\\n \\n     If --amend is set, update commit messages so they have the\\n     ``Differential Revision`` URL, remove related tags. This is similar to what\\n     arcanist will do, and is more desired in author-push workflows. Otherwise,\\n     use local tags to record the ``Differential Revision`` association.\\n \\n     The --confirm option lets you confirm changesets before sending them. 
You\\n     can also add following to your configuration file to make it default\\n     behaviour::\\n \\n         [phabsend]\\n         confirm = true\\n \\n     phabsend will check obsstore and the above association to decide whether to\\n     update an existing Differential Revision, or create a new one.\\n     \\\"\\\"\\\"\\n     opts = pycompat.byteskwargs(opts)\\n     revs = list(revs) + opts.get(b'rev', [])\\n     revs = scmutil.revrange(repo, revs)\\n     revs.sort()  # ascending order to preserve topological parent\\/child in phab\\n \\n     if not revs:\\n         raise error.Abort(_(b'phabsend requires at least one changeset'))\\n     if opts.get(b'amend'):\\n         cmdutil.checkunfinished(repo)\\n \\n     # {newnode: (oldnode, olddiff, olddrev}\\n     oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])\\n \\n     confirm = ui.configbool(b'phabsend', b'confirm')\\n     confirm |= bool(opts.get(b'confirm'))\\n     if confirm:\\n         confirmed = _confirmbeforesend(repo, revs, oldmap)\\n         if not confirmed:\\n             raise error.Abort(_(b'phabsend cancelled'))\\n \\n     actions = []\\n     reviewers = opts.get(b'reviewer', [])\\n     blockers = opts.get(b'blocker', [])\\n     phids = []\\n     if reviewers:\\n         phids.extend(userphids(repo, reviewers))\\n     if blockers:\\n         phids.extend(\\n             map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))\\n         )\\n     if phids:\\n         actions.append({b'type': b'reviewers.add', b'value': phids})\\n \\n     drevids = []  # [int]\\n     diffmap = {}  # {newnode: diff}\\n \\n     # Send patches one by one so we know their Differential Revision PHIDs and\\n     # can provide dependency relationship\\n     lastrevphid = None\\n     for rev in revs:\\n         ui.debug(b'sending rev %d\\\\n' % rev)\\n         ctx = repo[rev]\\n \\n         # Get Differential Revision ID\\n         oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))\\n         if oldnode != ctx.node() or opts.get(b'amend'):\\n             # Create or update Differential Revision\\n             revision, diff = createdifferentialrevision(\\n                 ctx,\\n                 revid,\\n                 lastrevphid,\\n                 oldnode,\\n                 olddiff,\\n                 actions,\\n                 opts.get(b'comment'),\\n             )\\n             diffmap[ctx.node()] = diff\\n             newrevid = int(revision[b'object'][b'id'])\\n             newrevphid = revision[b'object'][b'phid']\\n             if revid:\\n                 action = b'updated'\\n             else:\\n                 action = b'created'\\n \\n             # Create a local tag to note the association, if commit message\\n             # does not have it already\\n             m = _differentialrevisiondescre.search(ctx.description())\\n             if not m or int(m.group('id')) != newrevid:\\n                 tagname = b'D%d' % newrevid\\n                 tags.tag(\\n                     repo,\\n                     tagname,\\n                     ctx.node(),\\n                     message=None,\\n                     user=None,\\n                     date=None,\\n                     local=True,\\n                 )\\n         else:\\n             # Nothing changed. 
But still set \\\"newrevphid\\\" so the next revision\\n             # could depend on this one and \\\"newrevid\\\" for the summary line.\\n             newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']\\n             newrevid = revid\\n             action = b'skipped'\\n \\n         actiondesc = ui.label(\\n             {\\n                 b'created': _(b'created'),\\n                 b'skipped': _(b'skipped'),\\n                 b'updated': _(b'updated'),\\n             }[action],\\n             b'phabricator.action.%s' % action,\\n         )\\n         drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')\\n         nodedesc = ui.label(bytes(ctx), b'phabricator.node')\\n         desc = ui.label(ctx.description().split(b'\\\\n')[0], b'phabricator.desc')\\n         ui.write(\\n             _(b'%s - %s - %s: %s\\\\n') % (drevdesc, actiondesc, nodedesc, desc)\\n         )\\n         drevids.append(newrevid)\\n         lastrevphid = newrevphid\\n \\n     # Update commit messages and remove tags\\n     if opts.get(b'amend'):\\n         unfi = repo.unfiltered()\\n         drevs = callconduit(ui, b'differential.query', {b'ids': drevids})\\n         with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):\\n             wnode = unfi[b'.'].node()\\n             mapping = {}  # {oldnode: [newnode]}\\n             for i, rev in enumerate(revs):\\n                 old = unfi[rev]\\n                 drevid = drevids[i]\\n                 drev = [d for d in drevs if int(d[b'id']) == drevid][0]\\n                 newdesc = getdescfromdrev(drev)\\n                 # Make sure commit message contain \\\"Differential Revision\\\"\\n                 if old.description() != newdesc:\\n                     if old.phase() == phases.public:\\n                         ui.warn(\\n                             _(b\\\"warning: not updating public commit %s\\\\n\\\")\\n                             % scmutil.formatchangeid(old)\\n                         )\\n                         continue\\n                     parents = [\\n                         mapping.get(old.p1().node(), (old.p1(),))[0],\\n                         mapping.get(old.p2().node(), (old.p2(),))[0],\\n                     ]\\n                     new = context.metadataonlyctx(\\n                         repo,\\n                         old,\\n                         parents=parents,\\n                         text=newdesc,\\n                         user=old.user(),\\n                         date=old.date(),\\n                         extra=old.extra(),\\n                     )\\n \\n                     newnode = new.commit()\\n \\n                     mapping[old.node()] = [newnode]\\n                     # Update diff property\\n                     # If it fails just warn and keep going, otherwise the DREV\\n                     # associations will be lost\\n                     try:\\n                         writediffproperties(unfi[newnode], diffmap[old.node()])\\n                     except util.urlerr.urlerror:\\n                         ui.warnnoi18n(\\n                             b'Failed to update metadata for D%d\\\\n' % drevid\\n                         )\\n                 # Remove local tags since it's no longer necessary\\n                 tagname = b'D%d' % drevid\\n                 if tagname in repo.tags():\\n                     tags.tag(\\n                         repo,\\n                         tagname,\\n                         nullid,\\n                         message=None,\\n                      
   user=None,\\n                         date=None,\\n                         local=True,\\n                     )\\n             scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)\\n             if wnode in mapping:\\n                 unfi.setparents(mapping[wnode][0])\\n \\n \\n # Map from \\\"hg:meta\\\" keys to header understood by \\\"hg import\\\". The order is\\n # consistent with \\\"hg export\\\" output.\\n _metanamemap = util.sortdict(\\n     [\\n         (b'user', b'User'),\\n         (b'date', b'Date'),\\n         (b'branch', b'Branch'),\\n         (b'node', b'Node ID'),\\n         (b'parent', b'Parent '),\\n     ]\\n )\\n \\n \\n def _confirmbeforesend(repo, revs, oldmap):\\n     url, token = readurltoken(repo.ui)\\n     ui = repo.ui\\n     for rev in revs:\\n         ctx = repo[rev]\\n         desc = ctx.description().splitlines()[0]\\n         oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))\\n         if drevid:\\n             drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')\\n         else:\\n             drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')\\n \\n         ui.write(\\n             _(b'%s - %s: %s\\\\n')\\n             % (\\n                 drevdesc,\\n                 ui.label(bytes(ctx), b'phabricator.node'),\\n                 ui.label(desc, b'phabricator.desc'),\\n             )\\n         )\\n \\n     if ui.promptchoice(\\n         _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url\\n     ):\\n         return False\\n \\n     return True\\n \\n \\n _knownstatusnames = {\\n     b'accepted',\\n     b'needsreview',\\n     b'needsrevision',\\n     b'closed',\\n     b'abandoned',\\n     b'changesplanned',\\n }\\n \\n \\n def _getstatusname(drev):\\n     \\\"\\\"\\\"get normalized status name from a Differential Revision\\\"\\\"\\\"\\n     return drev[b'statusName'].replace(b' ', b'').lower()\\n \\n \\n # Small language to specify differential revisions. Support symbols: (), :X,\\n # +, and -.\\n \\n _elements = {\\n     # token-type: binding-strength, primary, prefix, infix, suffix\\n     b'(': (12, None, (b'group', 1, b')'), None, None),\\n     b':': (8, None, (b'ancestors', 8), None, None),\\n     b'&': (5, None, None, (b'and_', 5), None),\\n     b'+': (4, None, None, (b'add', 4), None),\\n     b'-': (4, None, None, (b'sub', 4), None),\\n     b')': (0, None, None, None, None),\\n     b'symbol': (0, b'symbol', None, None, None),\\n     b'end': (0, None, None, None, None),\\n }\\n \\n \\n def _tokenize(text):\\n     view = memoryview(text)  # zero-copy slice\\n     special = b'():+-& '\\n     pos = 0\\n     length = len(text)\\n     while pos \\u003c length:\\n         symbol = b''.join(\\n             itertools.takewhile(\\n                 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])\\n             )\\n         )\\n         if symbol:\\n             yield (b'symbol', symbol, pos)\\n             pos += len(symbol)\\n         else:  # special char, ignore space\\n             if text[pos : pos + 1] != b' ':\\n                 yield (text[pos : pos + 1], None, pos)\\n             pos += 1\\n     yield (b'end', None, pos)\\n \\n \\n def _parse(text):\\n     tree, pos = parser.parser(_elements).parse(_tokenize(text))\\n     if pos != len(text):\\n         raise error.ParseError(b'invalid token', pos)\\n     return tree\\n \\n \\n def _parsedrev(symbol):\\n     \\\"\\\"\\\"str -\\u003e int or None, ex. 
'D45' -\\u003e 45; '12' -\\u003e 12; 'x' -\\u003e None\\\"\\\"\\\"\\n     if symbol.startswith(b'D') and symbol[1:].isdigit():\\n         return int(symbol[1:])\\n     if symbol.isdigit():\\n         return int(symbol)\\n \\n \\n def _prefetchdrevs(tree):\\n     \\\"\\\"\\\"return ({single-drev-id}, {ancestor-drev-id}) to prefetch\\\"\\\"\\\"\\n     drevs = set()\\n     ancestordrevs = set()\\n     op = tree[0]\\n     if op == b'symbol':\\n         r = _parsedrev(tree[1])\\n         if r:\\n             drevs.add(r)\\n     elif op == b'ancestors':\\n         r, a = _prefetchdrevs(tree[1])\\n         drevs.update(r)\\n         ancestordrevs.update(r)\\n         ancestordrevs.update(a)\\n     else:\\n         for t in tree[1:]:\\n             r, a = _prefetchdrevs(t)\\n             drevs.update(r)\\n             ancestordrevs.update(a)\\n     return drevs, ancestordrevs\\n \\n \\n def querydrev(repo, spec):\\n     \\\"\\\"\\\"return a list of \\\"Differential Revision\\\" dicts\\n \\n     spec is a string using a simple query language, see docstring in phabread\\n     for details.\\n \\n     A \\\"Differential Revision dict\\\" looks like:\\n \\n         {\\n             \\\"activeDiffPHID\\\": \\\"PHID-DIFF-xoqnjkobbm6k4dk6hi72\\\",\\n             \\\"authorPHID\\\": \\\"PHID-USER-tv3ohwc4v4jeu34otlye\\\",\\n             \\\"auxiliary\\\": {\\n               \\\"phabricator:depends-on\\\": [\\n                 \\\"PHID-DREV-gbapp366kutjebt7agcd\\\"\\n               ]\\n               \\\"phabricator:projects\\\": [],\\n             },\\n             \\\"branch\\\": \\\"default\\\",\\n             \\\"ccs\\\": [],\\n             \\\"commits\\\": [],\\n             \\\"dateCreated\\\": \\\"1499181406\\\",\\n             \\\"dateModified\\\": \\\"1499182103\\\",\\n             \\\"diffs\\\": [\\n               \\\"3\\\",\\n               \\\"4\\\",\\n             ],\\n             \\\"hashes\\\": [],\\n             \\\"id\\\": \\\"2\\\",\\n             \\\"lineCount\\\": \\\"2\\\",\\n             \\\"phid\\\": \\\"PHID-DREV-672qvysjcczopag46qty\\\",\\n             \\\"properties\\\": {},\\n             \\\"repositoryPHID\\\": \\\"PHID-REPO-hub2hx62ieuqeheznasv\\\",\\n             \\\"reviewers\\\": [],\\n             \\\"sourcePath\\\": null\\n             \\\"status\\\": \\\"0\\\",\\n             \\\"statusName\\\": \\\"Needs Review\\\",\\n             \\\"summary\\\": \\\"\\\",\\n             \\\"testPlan\\\": \\\"\\\",\\n             \\\"title\\\": \\\"example\\\",\\n             \\\"uri\\\": \\\"https:\\/\\/phab.example.com\\/D2\\\",\\n         }\\n     \\\"\\\"\\\"\\n     # TODO: replace differential.query and differential.querydiffs with\\n     # differential.diff.search because the former (and their output) are\\n     # frozen, and planned to be deprecated and removed.\\n \\n     def fetch(params):\\n         \\\"\\\"\\\"params -\\u003e single drev or None\\\"\\\"\\\"\\n         key = (params.get(b'ids') or params.get(b'phids') or [None])[0]\\n         if key in prefetched:\\n             return prefetched[key]\\n         drevs = callconduit(repo.ui, b'differential.query', params)\\n         # Fill prefetched with the result\\n         for drev in drevs:\\n             prefetched[drev[b'phid']] = drev\\n             prefetched[int(drev[b'id'])] = drev\\n         if key not in prefetched:\\n             raise error.Abort(\\n                 _(b'cannot get Differential Revision %r') % params\\n             )\\n         return prefetched[key]\\n \\n     def getstack(topdrevids):\\n         
\\\"\\\"\\\"given a top, get a stack from the bottom, [id] -\\u003e [id]\\\"\\\"\\\"\\n         visited = set()\\n         result = []\\n         queue = [{b'ids': [i]} for i in topdrevids]\\n         while queue:\\n             params = queue.pop()\\n             drev = fetch(params)\\n             if drev[b'id'] in visited:\\n                 continue\\n             visited.add(drev[b'id'])\\n             result.append(int(drev[b'id']))\\n             auxiliary = drev.get(b'auxiliary', {})\\n             depends = auxiliary.get(b'phabricator:depends-on', [])\\n             for phid in depends:\\n                 queue.append({b'phids': [phid]})\\n         result.reverse()\\n         return smartset.baseset(result)\\n \\n     # Initialize prefetch cache\\n     prefetched = {}  # {id or phid: drev}\\n \\n     tree = _parse(spec)\\n     drevs, ancestordrevs = _prefetchdrevs(tree)\\n \\n     # developer config: phabricator.batchsize\\n     batchsize = repo.ui.configint(b'phabricator', b'batchsize')\\n \\n     # Prefetch Differential Revisions in batch\\n     tofetch = set(drevs)\\n     for r in ancestordrevs:\\n         tofetch.update(range(max(1, r - batchsize), r + 1))\\n     if drevs:\\n         fetch({b'ids': list(tofetch)})\\n     validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))\\n \\n     # Walk through the tree, return smartsets\\n     def walk(tree):\\n         op = tree[0]\\n         if op == b'symbol':\\n             drev = _parsedrev(tree[1])\\n             if drev:\\n                 return smartset.baseset([drev])\\n             elif tree[1] in _knownstatusnames:\\n                 drevs = [\\n                     r\\n                     for r in validids\\n                     if _getstatusname(prefetched[r]) == tree[1]\\n                 ]\\n                 return smartset.baseset(drevs)\\n             else:\\n                 raise error.Abort(_(b'unknown symbol: %s') % tree[1])\\n         elif op in {b'and_', b'add', b'sub'}:\\n             assert len(tree) == 3\\n             return getattr(operator, op)(walk(tree[1]), walk(tree[2]))\\n         elif op == b'group':\\n             return walk(tree[1])\\n         elif op == b'ancestors':\\n             return getstack(walk(tree[1]))\\n         else:\\n             raise error.ProgrammingError(b'illegal tree: %r' % tree)\\n \\n     return [prefetched[r] for r in walk(tree)]\\n \\n \\n def getdescfromdrev(drev):\\n     \\\"\\\"\\\"get description (commit message) from \\\"Differential Revision\\\"\\n \\n     This is similar to differential.getcommitmessage API. 
But we only care\\n     about limited fields: title, summary, test plan, and URL.\\n     \\\"\\\"\\\"\\n     title = drev[b'title']\\n     summary = drev[b'summary'].rstrip()\\n     testplan = drev[b'testPlan'].rstrip()\\n     if testplan:\\n         testplan = b'Test Plan:\\\\n%s' % testplan\\n     uri = b'Differential Revision: %s' % drev[b'uri']\\n     return b'\\\\n\\\\n'.join(filter(None, [title, summary, testplan, uri]))\\n \\n \\n def getdiffmeta(diff):\\n     \\\"\\\"\\\"get commit metadata (date, node, user, p1) from a diff object\\n \\n     The metadata could be \\\"hg:meta\\\", sent by phabsend, like:\\n \\n         \\\"properties\\\": {\\n           \\\"hg:meta\\\": {\\n             \\\"branch\\\": \\\"default\\\",\\n             \\\"date\\\": \\\"1499571514 25200\\\",\\n             \\\"node\\\": \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\",\\n             \\\"user\\\": \\\"Foo Bar \\u003cfoo@example.com\\u003e\\\",\\n             \\\"parent\\\": \\\"6d0abad76b30e4724a37ab8721d630394070fe16\\\"\\n           }\\n         }\\n \\n     Or converted from \\\"local:commits\\\", sent by \\\"arc\\\", like:\\n \\n         \\\"properties\\\": {\\n           \\\"local:commits\\\": {\\n             \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\": {\\n               \\\"author\\\": \\\"Foo Bar\\\",\\n               \\\"authorEmail\\\": \\\"foo@example.com\\\"\\n               \\\"branch\\\": \\\"default\\\",\\n               \\\"commit\\\": \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\",\\n               \\\"local\\\": \\\"1000\\\",\\n               \\\"message\\\": \\\"...\\\",\\n               \\\"parents\\\": [\\\"6d0abad76b30e4724a37ab8721d630394070fe16\\\"],\\n               \\\"rev\\\": \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\",\\n               \\\"summary\\\": \\\"...\\\",\\n               \\\"tag\\\": \\\"\\\",\\n               \\\"time\\\": 1499546314,\\n             }\\n           }\\n         }\\n \\n     Note: metadata extracted from \\\"local:commits\\\" will lose time zone\\n     information.\\n     \\\"\\\"\\\"\\n     props = diff.get(b'properties') or {}\\n     meta = props.get(b'hg:meta')\\n     if not meta:\\n         if props.get(b'local:commits'):\\n             commit = sorted(props[b'local:commits'].values())[0]\\n             meta = {}\\n             if b'author' in commit and b'authorEmail' in commit:\\n                 meta[b'user'] = b'%s \\u003c%s\\u003e' % (\\n                     commit[b'author'],\\n                     commit[b'authorEmail'],\\n                 )\\n             if b'time' in commit:\\n                 meta[b'date'] = b'%d 0' % int(commit[b'time'])\\n             if b'branch' in commit:\\n                 meta[b'branch'] = commit[b'branch']\\n             node = commit.get(b'commit', commit.get(b'rev'))\\n             if node:\\n                 meta[b'node'] = node\\n             if len(commit.get(b'parents', ())) \\u003e= 1:\\n                 meta[b'parent'] = commit[b'parents'][0]\\n         else:\\n             meta = {}\\n     if b'date' not in meta and b'dateCreated' in diff:\\n         meta[b'date'] = b'%s 0' % diff[b'dateCreated']\\n     if b'branch' not in meta and diff.get(b'branch'):\\n         meta[b'branch'] = diff[b'branch']\\n     if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):\\n         meta[b'parent'] = diff[b'sourceControlBaseRevision']\\n     return meta\\n \\n \\n-def readpatch(repo, drevs, write):\\n+def readpatch(ui, drevs, write):\\n     \\\"\\\"\\\"generate plain-text patch 
readable by 'hg import'\\n \\n     write is usually ui.write. drevs is what \\\"querydrev\\\" returns, results of\\n     \\\"differential.query\\\".\\n     \\\"\\\"\\\"\\n     # Prefetch hg:meta property for all diffs\\n     diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))\\n-    diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})\\n+    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})\\n \\n     # Generate patch for each drev\\n     for drev in drevs:\\n-        repo.ui.note(_(b'reading D%s\\\\n') % drev[b'id'])\\n+        ui.note(_(b'reading D%s\\\\n') % drev[b'id'])\\n \\n         diffid = max(int(v) for v in drev[b'diffs'])\\n-        body = callconduit(\\n-            repo.ui, b'differential.getrawdiff', {b'diffID': diffid}\\n-        )\\n+        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})\\n         desc = getdescfromdrev(drev)\\n         header = b'# HG changeset patch\\\\n'\\n \\n         # Try to preserve metadata from hg:meta property. Write hg patch\\n         # headers that can be read by the \\\"import\\\" command. See patchheadermap\\n         # and extract in mercurial\\/patch.py for supported headers.\\n         meta = getdiffmeta(diffs[b'%d' % diffid])\\n         for k in _metanamemap.keys():\\n             if k in meta:\\n                 header += b'# %s %s\\\\n' % (_metanamemap[k], meta[k])\\n \\n         content = b'%s%s\\\\n%s' % (header, desc, body)\\n         write(content)\\n \\n \\n @vcrcommand(\\n     b'phabread',\\n     [(b'', b'stack', False, _(b'read dependencies'))],\\n     _(b'DREVSPEC [OPTIONS]'),\\n     helpcategory=command.CATEGORY_IMPORT_EXPORT,\\n )\\n def phabread(ui, repo, spec, **opts):\\n     \\\"\\\"\\\"print patches from Phabricator suitable for importing\\n \\n     DREVSPEC could be a Differential Revision identity, like ``D123``, or just\\n     the number ``123``. It could also have common operators like ``+``, ``-``,\\n     ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to\\n     select a stack.\\n \\n     ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``\\n     could be used to filter patches by status. For performance reason, they\\n     only represent a subset of non-status selections and cannot be used alone.\\n \\n     For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude\\n     D2 and D4. ``:D9 & needsreview`` selects \\\"Needs Review\\\" revisions in a\\n     stack up to D9.\\n \\n     If --stack is given, follow dependencies information and read all patches.\\n     It is equivalent to the ``:`` operator.\\n     \\\"\\\"\\\"\\n     opts = pycompat.byteskwargs(opts)\\n     if opts.get(b'stack'):\\n         spec = b':(%s)' % spec\\n     drevs = querydrev(repo, spec)\\n-    readpatch(repo, drevs, ui.write)\\n+    readpatch(repo.ui, drevs, ui.write)\\n \\n \\n @vcrcommand(\\n     b'phabupdate',\\n     [\\n         (b'', b'accept', False, _(b'accept revisions')),\\n         (b'', b'reject', False, _(b'reject revisions')),\\n         (b'', b'abandon', False, _(b'abandon revisions')),\\n         (b'', b'reclaim', False, _(b'reclaim revisions')),\\n         (b'm', b'comment', b'', _(b'comment on the last revision')),\\n     ],\\n     _(b'DREVSPEC [OPTIONS]'),\\n     helpcategory=command.CATEGORY_IMPORT_EXPORT,\\n )\\n def phabupdate(ui, repo, spec, **opts):\\n     \\\"\\\"\\\"update Differential Revision in batch\\n \\n     DREVSPEC selects revisions. 
See :hg:`help phabread` for its usage.\\n     \\\"\\\"\\\"\\n     opts = pycompat.byteskwargs(opts)\\n     flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]\\n     if len(flags) \\u003e 1:\\n         raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))\\n \\n     actions = []\\n     for f in flags:\\n         actions.append({b'type': f, b'value': True})\\n \\n     drevs = querydrev(repo, spec)\\n     for i, drev in enumerate(drevs):\\n         if i + 1 == len(drevs) and opts.get(b'comment'):\\n             actions.append({b'type': b'comment', b'value': opts[b'comment']})\\n         if actions:\\n             params = {\\n                 b'objectIdentifier': drev[b'phid'],\\n                 b'transactions': actions,\\n             }\\n             callconduit(ui, b'differential.revision.edit', params)\\n \\n \\n @eh.templatekeyword(b'phabreview', requires={b'ctx'})\\n def template_review(context, mapping):\\n     \\\"\\\"\\\":phabreview: Object describing the review for this changeset.\\n     Has attributes `url` and `id`.\\n     \\\"\\\"\\\"\\n     ctx = context.resource(mapping, b'ctx')\\n     m = _differentialrevisiondescre.search(ctx.description())\\n     if m:\\n         return templateutil.hybriddict(\\n             {b'url': m.group('url'), b'id': b\\\"D%s\\\" % m.group('id'),}\\n         )\\n     else:\\n         tags = ctx.repo().nodetags(ctx.node())\\n         for t in tags:\\n             if _differentialrevisiontagre.match(t):\\n                 url = ctx.repo().ui.config(b'phabricator', b'url')\\n                 if not url.endswith(b'\\/'):\\n                     url += b'\\/'\\n                 url += t\\n \\n                 return templateutil.hybriddict({b'url': url, b'id': t,})\\n     return None\\n \\n \\n @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})\\n def template_status(context, mapping):\\n     \\\"\\\"\\\":phabstatus: String. 
Status of Phabricator differential.\\n     \\\"\\\"\\\"\\n     ctx = context.resource(mapping, b'ctx')\\n     repo = context.resource(mapping, b'repo')\\n     ui = context.resource(mapping, b'ui')\\n \\n     rev = ctx.rev()\\n     try:\\n         drevid = getdrevmap(repo, [rev])[rev]\\n     except KeyError:\\n         return None\\n     drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})\\n     for drev in drevs:\\n         if int(drev[b'id']) == drevid:\\n             return templateutil.hybriddict(\\n                 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}\\n             )\\n     return None\\n \\n \\n @show.showview(b'phabstatus', csettopic=b'work')\\n def phabstatusshowview(ui, repo, displayer):\\n     \\\"\\\"\\\"Phabricator differiential status\\\"\\\"\\\"\\n     revs = repo.revs('sort(_underway(), topo)')\\n     drevmap = getdrevmap(repo, revs)\\n     unknownrevs, drevids, revsbydrevid = [], set([]), {}\\n     for rev, drevid in pycompat.iteritems(drevmap):\\n         if drevid is not None:\\n             drevids.add(drevid)\\n             revsbydrevid.setdefault(drevid, set([])).add(rev)\\n         else:\\n             unknownrevs.append(rev)\\n \\n     drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})\\n     drevsbyrev = {}\\n     for drev in drevs:\\n         for rev in revsbydrevid[int(drev[b'id'])]:\\n             drevsbyrev[rev] = drev\\n \\n     def phabstatus(ctx):\\n         drev = drevsbyrev[ctx.rev()]\\n         status = ui.label(\\n             b'%(statusName)s' % drev,\\n             b'phabricator.status.%s' % _getstatusname(drev),\\n         )\\n         ui.write(b\\\"\\\\n%s %s\\\\n\\\" % (drev[b'uri'], status))\\n \\n     revs -= smartset.baseset(unknownrevs)\\n     revdag = graphmod.dagwalker(repo, revs)\\n \\n     ui.setconfig(b'experimental', b'graphshorten', True)\\n     displayer._exthook = phabstatus\\n     nodelen = show.longestshortest(repo, revs)\\n     logcmdutil.displaygraph(\\n         ui,\\n         repo,\\n         revdag,\\n         displayer,\\n         graphmod.asciiedges,\\n         props={b'nodelen': nodelen},\\n     )\\n\"}]}],\"properties\":[]}},\"error_code\":null,\"error_info\":null}"
                }, 
                "headers": {
                    "x-frame-options": [
                        "Deny"
                    ], 
                    "strict-transport-security": [
                        "max-age=0; includeSubdomains; preload"
                    ], 
                    "transfer-encoding": [
                        "chunked"
                    ], 
                    "cache-control": [
                        "no-store"
                    ], 
                    "referrer-policy": [
                        "no-referrer"
                    ], 
                    "x-content-type-options": [
                        "nosniff"
                    ], 
                    "x-xss-protection": [
                        "1; mode=block"
                    ], 
                    "server": [
                        "Apache/2.4.10 (Debian)"
                    ], 
                    "date": [
                        "Wed, 04 Mar 2020 22:05:22 GMT"
                    ], 
                    "content-type": [
                        "application/json"
                    ], 
                    "expires": [
                        "Sat, 01 Jan 2000 00:00:00 GMT"
                    ]
                }, 
                "status": {
                    "message": "OK", 
                    "code": 200
                }
            }, 
            "request": {
                "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B20441%2C+20442%2C+20443%5D%7D&output=json&__conduit__=1", 
                "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs", 
                "headers": {
                    "content-length": [
                        "165"
                    ], 
                    "accept": [
                        "application/mercurial-0.1"
                    ], 
                    "host": [
                        "phab.mercurial-scm.org"
                    ], 
                    "user-agent": [
                        "mercurial/proto-1.0 (Mercurial 5.3+33-bb58931d0c4f)"
                    ], 
                    "content-type": [
                        "application/x-www-form-urlencoded"
                    ]
                }, 
                "method": "POST"
            }
        }, 
        {
            "response": {
                "body": {
                    "string": "{\"result\":\"diff --git a\\/hgext\\/phabricator.py b\\/hgext\\/phabricator.py\\n--- a\\/hgext\\/phabricator.py\\n+++ b\\/hgext\\/phabricator.py\\n@@ -1609,7 +1609,7 @@\\n     return meta\\n \\n \\n-def readpatch(repo, drevs, write):\\n+def readpatch(ui, drevs, write):\\n     \\\"\\\"\\\"generate plain-text patch readable by 'hg import'\\n \\n     write is usually ui.write. drevs is what \\\"querydrev\\\" returns, results of\\n@@ -1617,16 +1617,14 @@\\n     \\\"\\\"\\\"\\n     # Prefetch hg:meta property for all diffs\\n     diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))\\n-    diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})\\n+    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})\\n \\n     # Generate patch for each drev\\n     for drev in drevs:\\n-        repo.ui.note(_(b'reading D%s\\\\n') % drev[b'id'])\\n+        ui.note(_(b'reading D%s\\\\n') % drev[b'id'])\\n \\n         diffid = max(int(v) for v in drev[b'diffs'])\\n-        body = callconduit(\\n-            repo.ui, b'differential.getrawdiff', {b'diffID': diffid}\\n-        )\\n+        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})\\n         desc = getdescfromdrev(drev)\\n         header = b'# HG changeset patch\\\\n'\\n \\n@@ -1671,7 +1669,7 @@\\n     if opts.get(b'stack'):\\n         spec = b':(%s)' % spec\\n     drevs = querydrev(repo, spec)\\n-    readpatch(repo, drevs, ui.write)\\n+    readpatch(repo.ui, drevs, ui.write)\\n \\n \\n @vcrcommand(\\n\\n\",\"error_code\":null,\"error_info\":null}"
                }, 
                "headers": {
                    "x-frame-options": [
                        "Deny"
                    ], 
                    "strict-transport-security": [
                        "max-age=0; includeSubdomains; preload"
                    ], 
                    "transfer-encoding": [
                        "chunked"
                    ], 
                    "cache-control": [
                        "no-store"
                    ], 
                    "referrer-policy": [
                        "no-referrer"
                    ], 
                    "x-content-type-options": [
                        "nosniff"
                    ], 
                    "x-xss-protection": [
                        "1; mode=block"
                    ], 
                    "server": [
                        "Apache/2.4.10 (Debian)"
                    ], 
                    "date": [
                        "Wed, 04 Mar 2020 22:05:23 GMT"
                    ], 
                    "content-type": [
                        "application/json"
                    ], 
                    "expires": [
                        "Sat, 01 Jan 2000 00:00:00 GMT"
                    ]
                }, 
                "status": {
                    "message": "OK", 
                    "code": 200
                }
            }, 
            "request": {
                "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22diffID%22%3A+20441%7D&output=json&__conduit__=1", 
                "uri": "https://phab.mercurial-scm.org//api/differential.getrawdiff", 
                "headers": {
                    "content-length": [
                        "144"
                    ], 
                    "accept": [
                        "application/mercurial-0.1"
                    ], 
                    "host": [
                        "phab.mercurial-scm.org"
                    ], 
                    "user-agent": [
                        "mercurial/proto-1.0 (Mercurial 5.3+33-bb58931d0c4f)"
                    ], 
                    "content-type": [
                        "application/x-www-form-urlencoded"
                    ]
                }, 
                "method": "POST"
            }
        }, 
        {
            "response": {
                "body": {
                    "string": "{\"result\":\"diff --git a\\/hgext\\/phabricator.py b\\/hgext\\/phabricator.py\\n--- a\\/hgext\\/phabricator.py\\n+++ b\\/hgext\\/phabricator.py\\n@@ -1040,11 +1040,11 @@\\n     return revision, diff\\n \\n \\n-def userphids(repo, names):\\n+def userphids(ui, names):\\n     \\\"\\\"\\\"convert user names to PHIDs\\\"\\\"\\\"\\n     names = [name.lower() for name in names]\\n     query = {b'constraints': {b'usernames': names}}\\n-    result = callconduit(repo.ui, b'user.search', query)\\n+    result = callconduit(ui, b'user.search', query)\\n     # username not found is not an error of the API. So check if we have missed\\n     # some names here.\\n     data = result[b'data']\\n@@ -1127,10 +1127,13 @@\\n     blockers = opts.get(b'blocker', [])\\n     phids = []\\n     if reviewers:\\n-        phids.extend(userphids(repo, reviewers))\\n+        phids.extend(userphids(repo.ui, reviewers))\\n     if blockers:\\n         phids.extend(\\n-            map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))\\n+            map(\\n+                lambda phid: b'blocking(%s)' % phid,\\n+                userphids(repo.ui, blockers),\\n+            )\\n         )\\n     if phids:\\n         actions.append({b'type': b'reviewers.add', b'value': phids})\\n\\n\",\"error_code\":null,\"error_info\":null}"
                }, 
                "headers": {
                    "x-frame-options": [
                        "Deny"
                    ], 
                    "strict-transport-security": [
                        "max-age=0; includeSubdomains; preload"
                    ], 
                    "transfer-encoding": [
                        "chunked"
                    ], 
                    "cache-control": [
                        "no-store"
                    ], 
                    "referrer-policy": [
                        "no-referrer"
                    ], 
                    "x-content-type-options": [
                        "nosniff"
                    ], 
                    "x-xss-protection": [
                        "1; mode=block"
                    ], 
                    "server": [
                        "Apache/2.4.10 (Debian)"
                    ], 
                    "date": [
                        "Wed, 04 Mar 2020 22:05:23 GMT"
                    ], 
                    "content-type": [
                        "application/json"
                    ], 
                    "expires": [
                        "Sat, 01 Jan 2000 00:00:00 GMT"
                    ]
                }, 
                "status": {
                    "message": "OK", 
                    "code": 200
                }
            }, 
            "request": {
                "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22diffID%22%3A+20443%7D&output=json&__conduit__=1", 
                "uri": "https://phab.mercurial-scm.org//api/differential.getrawdiff", 
                "headers": {
                    "content-length": [
                        "144"
                    ], 
                    "accept": [
                        "application/mercurial-0.1"
                    ], 
                    "host": [
                        "phab.mercurial-scm.org"
                    ], 
                    "user-agent": [
                        "mercurial/proto-1.0 (Mercurial 5.3+33-bb58931d0c4f)"
                    ], 
                    "content-type": [
                        "application/x-www-form-urlencoded"
                    ]
                }, 
                "method": "POST"
            }
        }, 
        {
            "response": {
                "body": {
                    "string": "{\"result\":\"diff --git a\\/hgext\\/phabricator.py b\\/hgext\\/phabricator.py\\n--- a\\/hgext\\/phabricator.py\\n+++ b\\/hgext\\/phabricator.py\\n@@ -1183,7 +1183,7 @@\\n         else:\\n             # Nothing changed. But still set \\\"newrevphid\\\" so the next revision\\n             # could depend on this one and \\\"newrevid\\\" for the summary line.\\n-            newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']\\n+            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']\\n             newrevid = revid\\n             action = b'skipped'\\n \\n@@ -1398,7 +1398,7 @@\\n     return drevs, ancestordrevs\\n \\n \\n-def querydrev(repo, spec):\\n+def querydrev(ui, spec):\\n     \\\"\\\"\\\"return a list of \\\"Differential Revision\\\" dicts\\n \\n     spec is a string using a simple query language, see docstring in phabread\\n@@ -1449,7 +1449,7 @@\\n         key = (params.get(b'ids') or params.get(b'phids') or [None])[0]\\n         if key in prefetched:\\n             return prefetched[key]\\n-        drevs = callconduit(repo.ui, b'differential.query', params)\\n+        drevs = callconduit(ui, b'differential.query', params)\\n         # Fill prefetched with the result\\n         for drev in drevs:\\n             prefetched[drev[b'phid']] = drev\\n@@ -1486,7 +1486,7 @@\\n     drevs, ancestordrevs = _prefetchdrevs(tree)\\n \\n     # developer config: phabricator.batchsize\\n-    batchsize = repo.ui.configint(b'phabricator', b'batchsize')\\n+    batchsize = ui.configint(b'phabricator', b'batchsize')\\n \\n     # Prefetch Differential Revisions in batch\\n     tofetch = set(drevs)\\n@@ -1668,7 +1668,7 @@\\n     opts = pycompat.byteskwargs(opts)\\n     if opts.get(b'stack'):\\n         spec = b':(%s)' % spec\\n-    drevs = querydrev(repo, spec)\\n+    drevs = querydrev(repo.ui, spec)\\n     readpatch(repo.ui, drevs, ui.write)\\n \\n \\n@@ -1698,7 +1698,7 @@\\n     for f in flags:\\n         actions.append({b'type': f, b'value': True})\\n \\n-    drevs = querydrev(repo, spec)\\n+    drevs = querydrev(repo.ui, spec)\\n     for i, drev in enumerate(drevs):\\n         if i + 1 == len(drevs) and opts.get(b'comment'):\\n             actions.append({b'type': b'comment', b'value': opts[b'comment']})\\n\\n\",\"error_code\":null,\"error_info\":null}"
                }, 
                "headers": {
                    "x-frame-options": [
                        "Deny"
                    ], 
                    "strict-transport-security": [
                        "max-age=0; includeSubdomains; preload"
                    ], 
                    "transfer-encoding": [
                        "chunked"
                    ], 
                    "cache-control": [
                        "no-store"
                    ], 
                    "referrer-policy": [
                        "no-referrer"
                    ], 
                    "x-content-type-options": [
                        "nosniff"
                    ], 
                    "x-xss-protection": [
                        "1; mode=block"
                    ], 
                    "server": [
                        "Apache/2.4.10 (Debian)"
                    ], 
                    "date": [
                        "Wed, 04 Mar 2020 22:05:23 GMT"
                    ], 
                    "content-type": [
                        "application/json"
                    ], 
                    "expires": [
                        "Sat, 01 Jan 2000 00:00:00 GMT"
                    ]
                }, 
                "status": {
                    "message": "OK", 
                    "code": 200
                }
            }, 
            "request": {
                "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22diffID%22%3A+20442%7D&output=json&__conduit__=1", 
                "uri": "https://phab.mercurial-scm.org//api/differential.getrawdiff", 
                "headers": {
                    "content-length": [
                        "144"
                    ], 
                    "accept": [
                        "application/mercurial-0.1"
                    ], 
                    "host": [
                        "phab.mercurial-scm.org"
                    ], 
                    "user-agent": [
                        "mercurial/proto-1.0 (Mercurial 5.3+33-bb58931d0c4f)"
                    ], 
                    "content-type": [
                        "application/x-www-form-urlencoded"
                    ]
                }, 
                "method": "POST"
            }
        }
    ]
}