changeset 42371:ffab9eed3921

merge with stable
author Augie Fackler <augie@google.com>
date Mon, 20 May 2019 11:40:47 -0400
parents de65ae32b82d (diff) 2338bdea4474 (current diff)
children e0ac310bd033
files mercurial/localrepo.py
diffstat 156 files changed, 8574 insertions(+), 3915 deletions(-)
--- a/contrib/all-revsets.txt	Mon May 20 10:08:28 2019 +0200
+++ b/contrib/all-revsets.txt	Mon May 20 11:40:47 2019 -0400
@@ -154,3 +154,6 @@
 roots(matching(tip, "author"))
 roots(matching(tip, "author")) and -10000:-1
 (-10000:-1) and roots(matching(tip, "author"))
+only(max(head()))
+only(max(head()), min(head()))
+only(max(head()), limit(head(), 1, 1))
--- a/contrib/automation/README.rst	Mon May 20 10:08:28 2019 +0200
+++ b/contrib/automation/README.rst	Mon May 20 11:40:47 2019 -0400
@@ -101,9 +101,14 @@
 * Storage costs for AMI / EBS snapshots. This should be just a few pennies
   per month.
 
-When running EC2 instances, you'll be billed accordingly. By default, we
-use *small* instances, like ``t3.medium``. This instance type costs ~$0.07 per
-hour.
+When running EC2 instances, you'll be billed accordingly. Default instance
+types vary by operation. We try to be respectful of your money when choosing
+defaults. For example, for Windows instances, which are billed per hour, we
+use ``t3.medium`` instances, which cost ~$0.07 per hour. For operations that
+scale well to many CPUs, like running Linux tests, we may use a more powerful
+instance such as ``c5.9xlarge``. However, since Linux instances are billed
+per second and running a ``c5.9xlarge`` for half the time of a ``c5.4xlarge``
+costs roughly the same, the choice is justified.
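+
+(As a rough illustration: on-demand pricing within the ``c5`` family scales
+approximately linearly with vCPU count, and a ``c5.9xlarge`` has 36 vCPUs
+versus 16 on a ``c5.4xlarge``, so a job that parallelizes well costs about
+the same in total while finishing much sooner.)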
 
 .. note::
 
@@ -125,3 +130,54 @@
 To purge all EC2 resources that we manage::
 
    $ automation.py purge-ec2-resources
+
+Remote Machine Interfaces
+=========================
+
+The code that connects to a remote machine and executes things is
+theoretically machine agnostic as long as the remote machine conforms to
+an *interface*. In other words, to perform actions like running tests
+remotely or triggering packaging, it shouldn't matter if the remote machine
+is an EC2 instance, a virtual machine, etc. This section attempts to document
+the interface that remote machines need to provide in order to be valid
+*targets* for remote execution. These interfaces are often neither ideal nor
+the most flexible. Instead, they have evolved along with the requirements of
+our automation code.
+
+Linux
+-----
+
+Remote Linux machines expose an SSH server on port 22. The SSH server
+must allow the ``hg`` user to authenticate using the SSH key generated by
+the automation code. The ``hg`` user should be part of the ``hg`` group
+and it should have ``sudo`` access without password prompting.
+
+The SSH channel must support SFTP to facilitate transferring files from
+client to server.
+
+``/bin/bash`` must be executable and point to a bash shell executable.
+
+The ``/hgdev`` directory must exist with all its content owned by ``hg:hg``.
+
+The ``/hgdev/pyenv`` directory should contain an installation of
+``pyenv``. Various Python distributions should be installed. The exact
+versions shouldn't matter. ``pyenv global`` should have been run so
+``/hgdev/pyenv/shims/`` is populated with redirector scripts that point
+to the appropriate Python executable.
+
+The ``/hgdev/venv-bootstrap`` directory must contain a virtualenv
+with Mercurial installed. The ``/hgdev/venv-bootstrap/bin/hg`` executable
+is referenced by various scripts and the client.
+
+The ``/hgdev/src`` directory MUST contain a clone of the Mercurial
+source code. The state of the working directory is not important.
+
+In order to run tests, the ``/hgwork`` directory will be created.
+This may require running various ``mkfs.*`` executables and ``mount``
+to provision a new filesystem. This will require elevated privileges
+via ``sudo``.
+
+Various dependencies to run the Mercurial test harness are also required.
+Documenting them is beyond the scope of this document. Some tests
+also require optional dependencies; missing dependencies are reported
+by the test runner when a test is skipped.
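+
+As an illustration (these commands are not part of the automation itself), a
+machine can be spot checked against this interface with something like::
+
+   $ ssh -i <automation private key> hg@<machine>
+   $ test -d /hgdev && test -d /hgdev/src/.hg && echo "layout ok"
+   $ /hgdev/venv-bootstrap/bin/hg version
+   $ ls /hgdev/pyenv/shims/
+   $ sudo -n true && echo "passwordless sudo ok"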
--- a/contrib/automation/hgautomation/__init__.py	Mon May 20 10:08:28 2019 +0200
+++ b/contrib/automation/hgautomation/__init__.py	Mon May 20 11:40:47 2019 -0400
@@ -53,7 +53,7 @@
 
         return password
 
-    def aws_connection(self, region: str):
+    def aws_connection(self, region: str, ensure_ec2_state: bool=True):
         """Obtain an AWSConnection instance bound to a specific region."""
 
-        return AWSConnection(self, region)
+        return AWSConnection(self, region, ensure_ec2_state=ensure_ec2_state)
--- a/contrib/automation/hgautomation/aws.py	Mon May 20 10:08:28 2019 +0200
+++ b/contrib/automation/hgautomation/aws.py	Mon May 20 11:40:47 2019 -0400
@@ -19,6 +19,13 @@
 import boto3
 import botocore.exceptions
 
+from .linux import (
+    BOOTSTRAP_DEBIAN,
+)
+from .ssh import (
+    exec_command as ssh_exec_command,
+    wait_for_ssh,
+)
 from .winrm import (
     run_powershell,
     wait_for_winrm,
@@ -31,12 +38,46 @@
                                 'install-windows-dependencies.ps1')
 
 
+INSTANCE_TYPES_WITH_STORAGE = {
+    'c5d',
+    'd2',
+    'h1',
+    'i3',
+    'm5ad',
+    'm5d',
+    'r5d',
+    'r5ad',
+    'x1',
+    'z1d',
+}
+
+
+DEBIAN_ACCOUNT_ID = '379101102735'
+UBUNTU_ACCOUNT_ID = '099720109477'
+
+
 KEY_PAIRS = {
     'automation',
 }
 
 
 SECURITY_GROUPS = {
+    'linux-dev-1': {
+        'description': 'Mercurial Linux instances that perform build/test automation',
+        'ingress': [
+            {
+                'FromPort': 22,
+                'ToPort': 22,
+                'IpProtocol': 'tcp',
+                'IpRanges': [
+                    {
+                        'CidrIp': '0.0.0.0/0',
+                        'Description': 'SSH from entire Internet',
+                    },
+                ],
+            },
+        ],
+    },
     'windows-dev-1': {
         'description': 'Mercurial Windows instances that perform build automation',
         'ingress': [
@@ -180,7 +221,7 @@
 class AWSConnection:
     """Manages the state of a connection with AWS."""
 
-    def __init__(self, automation, region: str):
+    def __init__(self, automation, region: str, ensure_ec2_state: bool=True):
         self.automation = automation
         self.local_state_path = automation.state_path
 
@@ -191,11 +232,12 @@
         self.ec2resource = self.session.resource('ec2')
         self.iamclient = self.session.client('iam')
         self.iamresource = self.session.resource('iam')
-
-        ensure_key_pairs(automation.state_path, self.ec2resource)
+        self.security_groups = {}
 
-        self.security_groups = ensure_security_groups(self.ec2resource)
-        ensure_iam_state(self.iamresource)
+        if ensure_ec2_state:
+            ensure_key_pairs(automation.state_path, self.ec2resource)
+            self.security_groups = ensure_security_groups(self.ec2resource)
+            ensure_iam_state(self.iamclient, self.iamresource)
 
     def key_pair_path_private(self, name):
         """Path to a key pair private key file."""
@@ -324,7 +366,7 @@
     profile.delete()
 
 
-def ensure_iam_state(iamresource, prefix='hg-'):
+def ensure_iam_state(iamclient, iamresource, prefix='hg-'):
     """Ensure IAM state is in sync with our canonical definition."""
 
     remote_profiles = {}
@@ -360,6 +402,10 @@
             InstanceProfileName=actual)
         remote_profiles[name] = profile
 
+        waiter = iamclient.get_waiter('instance_profile_exists')
+        waiter.wait(InstanceProfileName=actual)
+        print('IAM instance profile %s is available' % actual)
+
     for name in sorted(set(IAM_ROLES) - set(remote_roles)):
         entry = IAM_ROLES[name]
 
@@ -372,6 +418,10 @@
             AssumeRolePolicyDocument=ASSUME_ROLE_POLICY_DOCUMENT,
         )
 
+        waiter = iamclient.get_waiter('role_exists')
+        waiter.wait(RoleName=actual)
+        print('IAM role %s is available' % actual)
+
         remote_roles[name] = role
 
         for arn in entry['policy_arns']:
@@ -393,14 +443,14 @@
             profile.add_role(RoleName=role)
 
 
-def find_windows_server_2019_image(ec2resource):
-    """Find the Amazon published Windows Server 2019 base image."""
+def find_image(ec2resource, owner_id, name):
+    """Find an AMI by its owner ID and name."""
 
     images = ec2resource.images.filter(
         Filters=[
             {
-                'Name': 'owner-alias',
-                'Values': ['amazon'],
+                'Name': 'owner-id',
+                'Values': [owner_id],
             },
             {
                 'Name': 'state',
@@ -412,14 +462,14 @@
             },
             {
                 'Name': 'name',
-                'Values': ['Windows_Server-2019-English-Full-Base-2019.02.13'],
+                'Values': [name],
             },
         ])
 
     for image in images:
         return image
 
-    raise Exception('unable to find Windows Server 2019 image')
+    raise Exception('unable to find image for %s' % name)
 
 
 def ensure_security_groups(ec2resource, prefix='hg-'):
@@ -490,7 +540,7 @@
 
     terminate_ec2_instances(ec2resource, prefix=prefix)
 
-    for image in ec2resource.images.all():
+    for image in ec2resource.images.filter(Owners=['self']):
         if image.name.startswith(prefix):
             remove_ami(ec2resource, image)
 
@@ -505,6 +555,10 @@
 
     for role in iamresource.roles.all():
         if role.name.startswith(prefix):
+            for p in role.attached_policies.all():
+                print('detaching policy %s from %s' % (p.arn, role.name))
+                role.detach_policy(PolicyArn=p.arn)
+
             print('removing role %s' % role.name)
             role.delete()
 
@@ -671,6 +725,309 @@
         yield instances
 
 
+def resolve_fingerprint(fingerprint):
+    fingerprint = json.dumps(fingerprint, sort_keys=True)
+    return hashlib.sha256(fingerprint.encode('utf-8')).hexdigest()
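+
+# The fingerprint is a stable digest of everything that goes into building an
+# AMI: ensure_linux_dev_ami() below hashes the instance config, bootstrap
+# script, and requirements files. If any of those inputs change, the digest
+# changes, find_and_reconcile_image() discards the stale AMI, and a new image
+# is built.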
+
+
+def find_and_reconcile_image(ec2resource, name, fingerprint):
+    """Attempt to find an existing EC2 AMI with a name and fingerprint.
+
+    If an image with the specified fingerprint is found, it is returned.
+    Otherwise None is returned.
+
+    Existing images for the specified name that don't have the specified
+    fingerprint or are missing required metadata are deleted.
+    """
+    # Find existing AMIs with this name and delete the ones that are invalid.
+    # Store a reference to a good image so it can be returned once the
+    # image state is reconciled.
+    images = ec2resource.images.filter(
+        Filters=[{'Name': 'name', 'Values': [name]}])
+
+    existing_image = None
+
+    for image in images:
+        if image.tags is None:
+            print('image %s for %s lacks required tags; removing' % (
+                image.id, image.name))
+            remove_ami(ec2resource, image)
+        else:
+            tags = {t['Key']: t['Value'] for t in image.tags}
+
+            if tags.get('HGIMAGEFINGERPRINT') == fingerprint:
+                existing_image = image
+            else:
+                print('image %s for %s has wrong fingerprint; removing' % (
+                      image.id, image.name))
+                remove_ami(ec2resource, image)
+
+    return existing_image
+
+
+def create_ami_from_instance(ec2client, instance, name, description,
+                             fingerprint):
+    """Create an AMI from a running instance.
+
+    Returns the ``ec2resource.Image`` representing the created AMI.
+    """
+    instance.stop()
+
+    ec2client.get_waiter('instance_stopped').wait(
+        InstanceIds=[instance.id],
+        WaiterConfig={
+            'Delay': 5,
+        })
+    print('%s is stopped' % instance.id)
+
+    image = instance.create_image(
+        Name=name,
+        Description=description,
+    )
+
+    image.create_tags(Tags=[
+        {
+            'Key': 'HGIMAGEFINGERPRINT',
+            'Value': fingerprint,
+        },
+    ])
+
+    print('waiting for image %s' % image.id)
+
+    ec2client.get_waiter('image_available').wait(
+        ImageIds=[image.id],
+    )
+
+    print('image %s available as %s' % (image.id, image.name))
+
+    return image
+
+
+def ensure_linux_dev_ami(c: AWSConnection, distro='debian9', prefix='hg-'):
+    """Ensures a Linux development AMI is available and up-to-date.
+
+    Returns an ``ec2.Image`` of either an existing AMI or a newly-built one.
+    """
+    ec2client = c.ec2client
+    ec2resource = c.ec2resource
+
+    name = '%s%s-%s' % (prefix, 'linux-dev', distro)
+
+    if distro == 'debian9':
+        image = find_image(
+            ec2resource,
+            DEBIAN_ACCOUNT_ID,
+            'debian-stretch-hvm-x86_64-gp2-2019-02-19-26620',
+        )
+        ssh_username = 'admin'
+    elif distro == 'ubuntu18.04':
+        image = find_image(
+            ec2resource,
+            UBUNTU_ACCOUNT_ID,
+            'ubuntu/images/hvm-ssd/ubuntu-bionic-18.04-amd64-server-20190403',
+        )
+        ssh_username = 'ubuntu'
+    elif distro == 'ubuntu18.10':
+        image = find_image(
+            ec2resource,
+            UBUNTU_ACCOUNT_ID,
+            'ubuntu/images/hvm-ssd/ubuntu-cosmic-18.10-amd64-server-20190402',
+        )
+        ssh_username = 'ubuntu'
+    elif distro == 'ubuntu19.04':
+        image = find_image(
+            ec2resource,
+            UBUNTU_ACCOUNT_ID,
+            'ubuntu/images/hvm-ssd/ubuntu-disco-19.04-amd64-server-20190417',
+        )
+        ssh_username = 'ubuntu'
+    else:
+        raise ValueError('unsupported Linux distro: %s' % distro)
+
+    config = {
+        'BlockDeviceMappings': [
+            {
+                'DeviceName': image.block_device_mappings[0]['DeviceName'],
+                'Ebs': {
+                    'DeleteOnTermination': True,
+                    'VolumeSize': 8,
+                    'VolumeType': 'gp2',
+                },
+            },
+        ],
+        'EbsOptimized': True,
+        'ImageId': image.id,
+        'InstanceInitiatedShutdownBehavior': 'stop',
+        # 8 VCPUs for compiling Python.
+        'InstanceType': 't3.2xlarge',
+        'KeyName': '%sautomation' % prefix,
+        'MaxCount': 1,
+        'MinCount': 1,
+        'SecurityGroupIds': [c.security_groups['linux-dev-1'].id],
+    }
+
+    requirements2_path = (pathlib.Path(__file__).parent.parent /
+                          'linux-requirements-py2.txt')
+    requirements3_path = (pathlib.Path(__file__).parent.parent /
+                          'linux-requirements-py3.txt')
+    with requirements2_path.open('r', encoding='utf-8') as fh:
+        requirements2 = fh.read()
+    with requirements3_path.open('r', encoding='utf-8') as fh:
+        requirements3 = fh.read()
+
+    # Compute a deterministic fingerprint to determine whether the image
+    # needs to be regenerated.
+    fingerprint = resolve_fingerprint({
+        'instance_config': config,
+        'bootstrap_script': BOOTSTRAP_DEBIAN,
+        'requirements_py2': requirements2,
+        'requirements_py3': requirements3,
+    })
+
+    existing_image = find_and_reconcile_image(ec2resource, name, fingerprint)
+
+    if existing_image:
+        return existing_image
+
+    print('no suitable %s image found; creating one...' % name)
+
+    with temporary_ec2_instances(ec2resource, config) as instances:
+        wait_for_ip_addresses(instances)
+
+        instance = instances[0]
+
+        client = wait_for_ssh(
+            instance.public_ip_address, 22,
+            username=ssh_username,
+            key_filename=str(c.key_pair_path_private('automation')))
+
+        home = '/home/%s' % ssh_username
+
+        with client:
+            print('connecting to SSH server')
+            sftp = client.open_sftp()
+
+            print('uploading bootstrap files')
+            with sftp.open('%s/bootstrap' % home, 'wb') as fh:
+                fh.write(BOOTSTRAP_DEBIAN)
+                fh.chmod(0o0700)
+
+            with sftp.open('%s/requirements-py2.txt' % home, 'wb') as fh:
+                fh.write(requirements2)
+                fh.chmod(0o0700)
+
+            with sftp.open('%s/requirements-py3.txt' % home, 'wb') as fh:
+                fh.write(requirements3)
+                fh.chmod(0o0700)
+
+            print('executing bootstrap')
+            chan, stdin, stdout = ssh_exec_command(client,
+                                                   '%s/bootstrap' % home)
+            stdin.close()
+
+            for line in stdout:
+                print(line, end='')
+
+            res = chan.recv_exit_status()
+            if res:
+                raise Exception('non-0 exit from bootstrap: %d' % res)
+
+            print('bootstrap completed; stopping %s to create %s' % (
+                  instance.id, name))
+
+        return create_ami_from_instance(ec2client, instance, name,
+                                        'Mercurial Linux development environment',
+                                        fingerprint)
+
+
+@contextlib.contextmanager
+def temporary_linux_dev_instances(c: AWSConnection, image, instance_type,
+                                  prefix='hg-', ensure_extra_volume=False):
+    """Create temporary Linux development EC2 instances.
+
+    Context manager resolves to a list of ``ec2.Instance`` that were created
+    and are running.
+
+    ``ensure_extra_volume`` can be set to ``True`` to require that instances
+    have a 2nd storage volume available other than the primary AMI volume.
+    For instance types with instance storage, this does nothing special.
+    But for instance types without instance storage, an additional EBS volume
+    will be added to the instance.
+
+    Instances have an ``ssh_client`` attribute containing a paramiko SSHClient
+    instance bound to the instance.
+
+    Instances have an ``ssh_private_key_path`` attribute containing the
+    str path to the SSH private key used to connect to the instance.
+    """
+
+    block_device_mappings = [
+        {
+            'DeviceName': image.block_device_mappings[0]['DeviceName'],
+            'Ebs': {
+                'DeleteOnTermination': True,
+                'VolumeSize': 8,
+                'VolumeType': 'gp2',
+            },
+        }
+    ]
+
+    # This is not an exhaustive list of instance types having instance storage.
+    # But it covers the instance types we're likely to use.
+    if (ensure_extra_volume
+        and not instance_type.startswith(tuple(INSTANCE_TYPES_WITH_STORAGE))):
+        main_device = block_device_mappings[0]['DeviceName']
+
+        if main_device == 'xvda':
+            second_device = 'xvdb'
+        elif main_device == '/dev/sda1':
+            second_device = '/dev/sdb'
+        else:
+            raise ValueError('unhandled primary EBS device name: %s' %
+                             main_device)
+
+        block_device_mappings.append({
+            'DeviceName': second_device,
+            'Ebs': {
+                'DeleteOnTermination': True,
+                'VolumeSize': 8,
+                'VolumeType': 'gp2',
+            }
+        })
+
+    config = {
+        'BlockDeviceMappings': block_device_mappings,
+        'EbsOptimized': True,
+        'ImageId': image.id,
+        'InstanceInitiatedShutdownBehavior': 'terminate',
+        'InstanceType': instance_type,
+        'KeyName': '%sautomation' % prefix,
+        'MaxCount': 1,
+        'MinCount': 1,
+        'SecurityGroupIds': [c.security_groups['linux-dev-1'].id],
+    }
+
+    with temporary_ec2_instances(c.ec2resource, config) as instances:
+        wait_for_ip_addresses(instances)
+
+        ssh_private_key_path = str(c.key_pair_path_private('automation'))
+
+        for instance in instances:
+            client = wait_for_ssh(
+                instance.public_ip_address, 22,
+                username='hg',
+                key_filename=ssh_private_key_path)
+
+            instance.ssh_client = client
+            instance.ssh_private_key_path = ssh_private_key_path
+
+        try:
+            yield instances
+        finally:
+            for instance in instances:
+                instance.ssh_client.close()
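+
+# Illustrative usage of the context manager above (a simplified sketch of
+# what run_tests_linux() in cli.py does):
+#
+#     image = ensure_linux_dev_ami(c, distro='debian9')
+#     with temporary_linux_dev_instances(c, image, 'c5.9xlarge') as insts:
+#         insts[0].ssh_client.exec_command('uname -a')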
+
+
 def ensure_windows_dev_ami(c: AWSConnection, prefix='hg-'):
     """Ensure Windows Development AMI is available and up-to-date.
 
@@ -689,6 +1046,10 @@
 
     name = '%s%s' % (prefix, 'windows-dev')
 
+    image = find_image(ec2resource,
+                       '801119661308',
+                       'Windows_Server-2019-English-Full-Base-2019.02.13')
+
     config = {
         'BlockDeviceMappings': [
             {
@@ -700,7 +1061,7 @@
                 },
             }
         ],
-        'ImageId': find_windows_server_2019_image(ec2resource).id,
+        'ImageId': image.id,
         'InstanceInitiatedShutdownBehavior': 'stop',
         'InstanceType': 't3.medium',
         'KeyName': '%sautomation' % prefix,
@@ -735,38 +1096,14 @@
 
     # Compute a deterministic fingerprint to determine whether image needs
     # to be regenerated.
-    fingerprint = {
+    fingerprint = resolve_fingerprint({
         'instance_config': config,
         'user_data': WINDOWS_USER_DATA,
         'initial_bootstrap': WINDOWS_BOOTSTRAP_POWERSHELL,
         'bootstrap_commands': commands,
-    }
-
-    fingerprint = json.dumps(fingerprint, sort_keys=True)
-    fingerprint = hashlib.sha256(fingerprint.encode('utf-8')).hexdigest()
-
-    # Find existing AMIs with this name and delete the ones that are invalid.
-    # Store a reference to a good image so it can be returned one the
-    # image state is reconciled.
-    images = ec2resource.images.filter(
-        Filters=[{'Name': 'name', 'Values': [name]}])
-
-    existing_image = None
+    })
 
-    for image in images:
-        if image.tags is None:
-            print('image %s for %s lacks required tags; removing' % (
-                image.id, image.name))
-            remove_ami(ec2resource, image)
-        else:
-            tags = {t['Key']: t['Value'] for t in image.tags}
-
-            if tags.get('HGIMAGEFINGERPRINT') == fingerprint:
-                existing_image = image
-            else:
-                print('image %s for %s has wrong fingerprint; removing' % (
-                      image.id, image.name))
-                remove_ami(ec2resource, image)
+    existing_image = find_and_reconcile_image(ec2resource, name, fingerprint)
 
     if existing_image:
         return existing_image
@@ -795,10 +1132,26 @@
         )
 
         # Reboot so all updates are fully applied.
+        #
+        # We don't use instance.reboot() here because it is asynchronous and
+        # we don't know when exactly the instance has rebooted. It could take
+        # a while to stop and we may start trying to interact with the instance
+        # before it has rebooted.
         print('rebooting instance %s' % instance.id)
-        ec2client.reboot_instances(InstanceIds=[instance.id])
+        instance.stop()
+        ec2client.get_waiter('instance_stopped').wait(
+            InstanceIds=[instance.id],
+            WaiterConfig={
+                'Delay': 5,
+            })
 
-        time.sleep(15)
+        instance.start()
+        wait_for_ip_addresses([instance])
+
+        # There is a race condition here between the User Data PS script running
+        # and us connecting to WinRM. This can manifest as
+        # "AuthorizationManager check failed" failures during run_powershell().
+        # TODO figure out a workaround.
 
         print('waiting for Windows Remote Management to come back...')
         client = wait_for_winrm(instance.public_ip_address, 'Administrator',
@@ -810,36 +1163,9 @@
         run_powershell(instance.winrm_client, '\n'.join(commands))
 
         print('bootstrap completed; stopping %s to create image' % instance.id)
-        instance.stop()
-
-        ec2client.get_waiter('instance_stopped').wait(
-            InstanceIds=[instance.id],
-            WaiterConfig={
-                'Delay': 5,
-            })
-        print('%s is stopped' % instance.id)
-
-        image = instance.create_image(
-            Name=name,
-            Description='Mercurial Windows development environment',
-        )
-
-        image.create_tags(Tags=[
-            {
-                'Key': 'HGIMAGEFINGERPRINT',
-                'Value': fingerprint,
-            },
-        ])
-
-        print('waiting for image %s' % image.id)
-
-        ec2client.get_waiter('image_available').wait(
-            ImageIds=[image.id],
-        )
-
-        print('image %s available as %s' % (image.id, image.name))
-
-        return image
+        return create_ami_from_instance(ec2client, instance, name,
+                                        'Mercurial Windows development environment',
+                                        fingerprint)
 
 
 @contextlib.contextmanager
--- a/contrib/automation/hgautomation/cli.py	Mon May 20 10:08:28 2019 +0200
+++ b/contrib/automation/hgautomation/cli.py	Mon May 20 11:40:47 2019 -0400
@@ -8,12 +8,15 @@
 # no-check-code because Python 3 native.
 
 import argparse
+import concurrent.futures as futures
 import os
 import pathlib
+import time
 
 from . import (
     aws,
     HGAutomation,
+    linux,
     windows,
 )
 
@@ -22,6 +25,33 @@
 DIST_PATH = SOURCE_ROOT / 'dist'
 
 
+def bootstrap_linux_dev(hga: HGAutomation, aws_region, distros=None,
+                        parallel=False):
+    c = hga.aws_connection(aws_region)
+
+    if distros:
+        distros = distros.split(',')
+    else:
+        distros = sorted(linux.DISTROS)
+
+    # TODO There is a wonky interaction involving KeyboardInterrupt whereby
+    # the context manager that is supposed to terminate the temporary EC2
+    # instance doesn't run. Until we fix this, make parallel building opt-in
+    # so we don't orphan instances.
+    if parallel:
+        fs = []
+
+        with futures.ThreadPoolExecutor(len(distros)) as e:
+            for distro in distros:
+                fs.append(e.submit(aws.ensure_linux_dev_ami, c, distro=distro))
+
+            for f in fs:
+                f.result()
+    else:
+        for distro in distros:
+            aws.ensure_linux_dev_ami(c, distro=distro)
+
+
 def bootstrap_windows_dev(hga: HGAutomation, aws_region):
     c = hga.aws_connection(aws_region)
     image = aws.ensure_windows_dev_ami(c)
@@ -73,7 +103,8 @@
             windows.build_wheel(instance.winrm_client, a, DIST_PATH)
 
 
-def build_all_windows_packages(hga: HGAutomation, aws_region, revision):
+def build_all_windows_packages(hga: HGAutomation, aws_region, revision,
+                               version):
     c = hga.aws_connection(aws_region)
     image = aws.ensure_windows_dev_ami(c)
     DIST_PATH.mkdir(exist_ok=True)
@@ -89,19 +120,52 @@
             windows.purge_hg(winrm_client)
             windows.build_wheel(winrm_client, arch, DIST_PATH)
             windows.purge_hg(winrm_client)
-            windows.build_inno_installer(winrm_client, arch, DIST_PATH)
+            windows.build_inno_installer(winrm_client, arch, DIST_PATH,
+                                         version=version)
             windows.purge_hg(winrm_client)
-            windows.build_wix_installer(winrm_client, arch, DIST_PATH)
+            windows.build_wix_installer(winrm_client, arch, DIST_PATH,
+                                        version=version)
 
 
 def terminate_ec2_instances(hga: HGAutomation, aws_region):
-    c = hga.aws_connection(aws_region)
+    c = hga.aws_connection(aws_region, ensure_ec2_state=False)
     aws.terminate_ec2_instances(c.ec2resource)
 
 
 def purge_ec2_resources(hga: HGAutomation, aws_region):
+    c = hga.aws_connection(aws_region, ensure_ec2_state=False)
+    aws.remove_resources(c)
+
+
+def run_tests_linux(hga: HGAutomation, aws_region, instance_type,
+                    python_version, test_flags, distro, filesystem):
     c = hga.aws_connection(aws_region)
-    aws.remove_resources(c)
+    image = aws.ensure_linux_dev_ami(c, distro=distro)
+
+    t_start = time.time()
+
+    ensure_extra_volume = filesystem not in ('default', 'tmpfs')
+
+    with aws.temporary_linux_dev_instances(
+        c, image, instance_type,
+        ensure_extra_volume=ensure_extra_volume) as insts:
+
+        instance = insts[0]
+
+        linux.prepare_exec_environment(instance.ssh_client,
+                                       filesystem=filesystem)
+        linux.synchronize_hg(SOURCE_ROOT, instance, '.')
+        t_prepared = time.time()
+        linux.run_tests(instance.ssh_client, python_version,
+                        test_flags)
+        t_done = time.time()
+
+    t_setup = t_prepared - t_start
+    t_all = t_done - t_start
+
+    print(
+        'total time: %.1fs; setup: %.1fs; tests: %.1fs; setup overhead: %.1f%%'
+        % (t_all, t_setup, t_done - t_prepared, t_setup / t_all * 100.0))
 
 
 def run_tests_windows(hga: HGAutomation, aws_region, instance_type,
@@ -135,6 +199,21 @@
     subparsers = parser.add_subparsers()
 
     sp = subparsers.add_parser(
+        'bootstrap-linux-dev',
+        help='Bootstrap Linux development environments',
+    )
+    sp.add_argument(
+        '--distros',
+        help='Comma delimited list of distros to bootstrap',
+    )
+    sp.add_argument(
+        '--parallel',
+        action='store_true',
+        help='Generate AMIs in parallel (not CTRL-c safe)'
+    )
+    sp.set_defaults(func=bootstrap_linux_dev)
+
+    sp = subparsers.add_parser(
         'bootstrap-windows-dev',
         help='Bootstrap the Windows development environment',
     )
@@ -149,6 +228,10 @@
         help='Mercurial revision to build',
         default='.',
     )
+    sp.add_argument(
+        '--version',
+        help='Mercurial version string to use',
+    )
     sp.set_defaults(func=build_all_windows_packages)
 
     sp = subparsers.add_parser(
@@ -226,6 +309,41 @@
     sp.set_defaults(func=purge_ec2_resources)
 
     sp = subparsers.add_parser(
+        'run-tests-linux',
+        help='Run tests on Linux',
+    )
+    sp.add_argument(
+        '--distro',
+        help='Linux distribution to run tests on',
+        choices=linux.DISTROS,
+        default='debian9',
+    )
+    sp.add_argument(
+        '--filesystem',
+        help='Filesystem type to use',
+        choices={'btrfs', 'default', 'ext3', 'ext4', 'jfs', 'tmpfs', 'xfs'},
+        default='default',
+    )
+    sp.add_argument(
+        '--instance-type',
+        help='EC2 instance type to use',
+        default='c5.9xlarge',
+    )
+    sp.add_argument(
+        '--python-version',
+        help='Python version to use',
+        choices={'system2', 'system3', '2.7', '3.5', '3.6', '3.7', '3.8',
+                 'pypy', 'pypy3.5', 'pypy3.6'},
+        default='system2',
+    )
+    sp.add_argument(
+        'test_flags',
+        help='Extra command line flags to pass to run-tests.py',
+        nargs='*',
+    )
+    sp.set_defaults(func=run_tests_linux)
+
+    sp = subparsers.add_parser(
         'run-tests-windows',
         help='Run tests on Windows',
     )
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/automation/hgautomation/linux.py	Mon May 20 11:40:47 2019 -0400
@@ -0,0 +1,545 @@
+# linux.py - Linux specific automation functionality
+#
+# Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+# no-check-code because Python 3 native.
+
+import os
+import pathlib
+import shlex
+import subprocess
+import tempfile
+
+from .ssh import (
+    exec_command,
+)
+
+
+# Linux distributions that are supported.
+DISTROS = {
+    'debian9',
+    'ubuntu18.04',
+    'ubuntu18.10',
+    'ubuntu19.04',
+}
+
+INSTALL_PYTHONS = r'''
+PYENV2_VERSIONS="2.7.16 pypy2.7-7.1.1"
+PYENV3_VERSIONS="3.5.7 3.6.8 3.7.3 3.8-dev pypy3.5-7.0.0 pypy3.6-7.1.1"
+
+git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
+pushd /hgdev/pyenv
+git checkout 3faeda67bb33e07750d1a104271369a7384ca45c
+popd
+
+export PYENV_ROOT="/hgdev/pyenv"
+export PATH="$PYENV_ROOT/bin:$PATH"
+
+# pip 19.0.3.
+PIP_SHA256=efe99298f3fbb1f56201ce6b81d2658067d2f7d7dfc2d412e0d3cacc9a397c61
+wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/fee32c376da1ff6496a798986d7939cd51e1644f/get-pip.py
+echo "${PIP_SHA256} get-pip.py" | sha256sum --check -
+
+VIRTUALENV_SHA256=984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39
+VIRTUALENV_TARBALL=virtualenv-16.4.3.tar.gz
+wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/37/db/89d6b043b22052109da35416abc3c397655e4bd3cff031446ba02b9654fa/${VIRTUALENV_TARBALL}
+echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -
+
+for v in ${PYENV2_VERSIONS}; do
+    pyenv install -v ${v}
+    ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
+    ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
+    ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
+done
+
+for v in ${PYENV3_VERSIONS}; do
+    pyenv install -v ${v}
+    ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
+    ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py3.txt
+done
+
+pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
+'''.lstrip().replace('\r\n', '\n')
+
+
+BOOTSTRAP_VIRTUALENV = r'''
+/usr/bin/virtualenv /hgdev/venv-bootstrap
+
+HG_SHA256=1bdd21bb87d1e05fb5cd395d488d0e0cc2f2f90ce0fd248e31a03595da5ccb47
+HG_TARBALL=mercurial-4.9.1.tar.gz
+
+wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
+echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -
+
+/hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
+'''.lstrip().replace('\r\n', '\n')
+
+
+BOOTSTRAP_DEBIAN = r'''
+#!/bin/bash
+
+set -ex
+
+DISTRO=`grep DISTRIB_ID /etc/lsb-release  | awk -F= '{{print $2}}'`
+DEBIAN_VERSION=`cat /etc/debian_version`
+LSB_RELEASE=`lsb_release -cs`
+
+sudo /usr/sbin/groupadd hg
+sudo /usr/sbin/groupadd docker
+sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg
+sudo mkdir /home/hg/.ssh
+sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys
+sudo chown -R hg:hg /home/hg/.ssh
+sudo chmod 700 /home/hg/.ssh
+sudo chmod 600 /home/hg/.ssh/authorized_keys
+
+cat << EOF | sudo tee /etc/sudoers.d/90-hg
+hg ALL=(ALL) NOPASSWD:ALL
+EOF
+
+sudo apt-get update
+sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade
+
+# Install packages necessary to set up Docker Apt repo.
+sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \
+    apt-transport-https \
+    gnupg
+
+cat > docker-apt-key << EOF
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth
+lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh
+38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq
+L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7
+UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N
+cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht
+ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo
+vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD
+G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ
+XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj
+q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB
+tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3
+BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO
+v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd
+tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk
+jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m
+6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P
+XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc
+FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8
+g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm
+ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh
+9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5
+G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW
+FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB
+EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF
+M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx
+Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu
+w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk
+z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8
+eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb
+VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa
+1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X
+zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ
+pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7
+ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ
+BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY
+1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp
+YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI
+mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES
+KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7
+JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ
+cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0
+6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5
+U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z
+VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f
+irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk
+SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz
+QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W
+9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw
+24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe
+dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y
+Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR
+H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh
+/nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ
+M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S
+xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O
+jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG
+YT90qFF93M3v01BbxP+EIY2/9tiIPbrd
+=0YYh
+-----END PGP PUBLIC KEY BLOCK-----
+EOF
+
+sudo apt-key add docker-apt-key
+
+if [ "$DEBIAN_VERSION" = "9.8" ]; then
+cat << EOF | sudo tee -a /etc/apt/sources.list
+# Need backports for clang-format-6.0
+deb http://deb.debian.org/debian stretch-backports main
+
+# Sources are useful if we want to compile things locally.
+deb-src http://deb.debian.org/debian stretch main
+deb-src http://security.debian.org/debian-security stretch/updates main
+deb-src http://deb.debian.org/debian stretch-updates main
+deb-src http://deb.debian.org/debian stretch-backports main
+
+deb [arch=amd64] https://download.docker.com/linux/debian stretch stable
+EOF
+
+elif [ "$DISTRO" = "Ubuntu" ]; then
+cat << EOF | sudo tee -a /etc/apt/sources.list
+deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable
+EOF
+
+fi
+
+sudo apt-get update
+
+PACKAGES="\
+    btrfs-progs \
+    build-essential \
+    bzr \
+    clang-format-6.0 \
+    cvs \
+    darcs \
+    debhelper \
+    devscripts \
+    dpkg-dev \
+    dstat \
+    emacs \
+    gettext \
+    git \
+    htop \
+    iotop \
+    jfsutils \
+    libbz2-dev \
+    libexpat1-dev \
+    libffi-dev \
+    libgdbm-dev \
+    liblzma-dev \
+    libncurses5-dev \
+    libnss3-dev \
+    libreadline-dev \
+    libsqlite3-dev \
+    libssl-dev \
+    netbase \
+    ntfs-3g \
+    nvme-cli \
+    pyflakes \
+    pyflakes3 \
+    pylint \
+    pylint3 \
+    python-all-dev \
+    python-dev \
+    python-docutils \
+    python-fuzzywuzzy \
+    python-pygments \
+    python-subversion \
+    python-vcr \
+    python3-dev \
+    python3-docutils \
+    python3-fuzzywuzzy \
+    python3-pygments \
+    python3-vcr \
+    rsync \
+    sqlite3 \
+    subversion \
+    tcl-dev \
+    tk-dev \
+    tla \
+    unzip \
+    uuid-dev \
+    vim \
+    virtualenv \
+    wget \
+    xfsprogs \
+    zip \
+    zlib1g-dev"
+
+if [ "$DEBIAN_VERSION" = "9.8" ]; then
+    PACKAGES="$PACKAGES linux-perf"
+elif [ "$DISTRO" = "Ubuntu" ]; then
+    PACKAGES="$PACKAGES linux-tools-common"
+fi
+
+# Ubuntu 19.04 removes monotone.
+if [ "$LSB_RELEASE" != "disco" ]; then
+    PACKAGES="$PACKAGES monotone"
+fi
+
+# As of April 27, 2019, Docker hasn't published packages for
+# Ubuntu 19.04 yet.
+if [ "$LSB_RELEASE" != "disco" ]; then
+    PACKAGES="$PACKAGES docker-ce"
+fi
+
+sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES
+
+# Create clang-format symlink so test harness finds it.
+sudo update-alternatives --install /usr/bin/clang-format clang-format \
+    /usr/bin/clang-format-6.0 1000
+
+sudo mkdir /hgdev
+# Will be normalized to hg:hg later.
+sudo chown `whoami` /hgdev
+
+cp requirements-py2.txt /hgdev/requirements-py2.txt
+cp requirements-py3.txt /hgdev/requirements-py3.txt
+
+# Disable the pip version check because it uses the network and can
+# be annoying.
+cat << EOF | sudo tee -a /etc/pip.conf
+[global]
+disable-pip-version-check = True
+EOF
+
+{install_pythons}
+{bootstrap_virtualenv}
+
+/hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src
+
+# Mark the repo as non-publishing.
+cat >> /hgdev/src/.hg/hgrc << EOF
+[phases]
+publish = false
+EOF
+
+sudo chown -R hg:hg /hgdev
+'''.lstrip().format(
+    install_pythons=INSTALL_PYTHONS,
+    bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV
+).replace('\r\n', '\n')
+
+
+# Prepares /hgdev for operations.
+PREPARE_HGDEV = '''
+#!/bin/bash
+
+set -e
+
+FS=$1
+
+ensure_device() {
+    if [ -z "${DEVICE}" ]; then
+        echo "could not find block device to format"
+        exit 1
+    fi
+}
+
+# Determine device to partition for extra filesystem.
+# If only 1 volume is present, it will be the root volume and
+# should be /dev/nvme0. If multiple volumes are present, the
+# root volume could be nvme0 or nvme1. Use whichever one doesn't have
+# a partition.
+if [ -e /dev/nvme1n1 ]; then
+    if [ -e /dev/nvme0n1p1 ]; then
+        DEVICE=/dev/nvme1n1
+    else
+        DEVICE=/dev/nvme0n1
+    fi
+else
+    DEVICE=
+fi
+
+sudo mkdir /hgwork
+
+if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then
+    ensure_device
+    echo "creating ${FS} filesystem on ${DEVICE}"
+fi
+
+if [ "${FS}" = "default" ]; then
+    :
+
+elif [ "${FS}" = "btrfs" ]; then
+    sudo mkfs.btrfs ${DEVICE}
+    sudo mount ${DEVICE} /hgwork
+
+elif [ "${FS}" = "ext3" ]; then
+    # lazy_journal_init speeds up filesystem creation at the expense of
+    # integrity if things crash. We are an ephemeral instance, so we don't
+    # care about integrity.
+    sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE}
+    sudo mount ${DEVICE} /hgwork
+
+elif [ "${FS}" = "ext4" ]; then
+    sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE}
+    sudo mount ${DEVICE} /hgwork
+
+elif [ "${FS}" = "jfs" ]; then
+    sudo mkfs.jfs ${DEVICE}
+    sudo mount ${DEVICE} /hgwork
+
+elif [ "${FS}" = "tmpfs" ]; then
+    echo "creating tmpfs volume in /hgwork"
+    sudo mount -t tmpfs -o size=1024M tmpfs /hgwork
+
+elif [ "${FS}" = "xfs" ]; then
+    sudo mkfs.xfs ${DEVICE}
+    sudo mount ${DEVICE} /hgwork
+
+else
+    echo "unsupported filesystem: ${FS}"
+    exit 1
+fi
+
+echo "/hgwork ready"
+
+sudo chown hg:hg /hgwork
+mkdir /hgwork/tmp
+chown hg:hg /hgwork/tmp
+
+rsync -a /hgdev/src /hgwork/
+'''.lstrip().replace('\r\n', '\n')
+
+
+HG_UPDATE_CLEAN = '''
+set -ex
+
+HG=/hgdev/venv-bootstrap/bin/hg
+
+cd /hgwork/src
+${HG} --config extensions.purge= purge --all
+${HG} update -C $1
+${HG} log -r .
+'''.lstrip().replace('\r\n', '\n')
+
+
+def prepare_exec_environment(ssh_client, filesystem='default'):
+    """Prepare an EC2 instance to execute things.
+
+    The AMI has an ``/hgdev`` bootstrapped with various Python installs
+    and a clone of the Mercurial repo.
+
+    In EC2, EBS volumes launched from snapshots have wonky performance behavior.
+    Notably, blocks have to be copied on first access, which makes volume
+    I/O extremely slow on fresh volumes.
+
+    Furthermore, we may want to run operations, tests, etc. on alternative
+    filesystems so we can examine behavior across different filesystems.
+
+    This function is used to facilitate executing operations on alternate
+    volumes.
+    """
+    sftp = ssh_client.open_sftp()
+
+    with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh:
+        fh.write(PREPARE_HGDEV)
+        fh.chmod(0o0777)
+
+    command = 'sudo /hgdev/prepare-hgdev %s' % filesystem
+    chan, stdin, stdout = exec_command(ssh_client, command)
+    stdin.close()
+
+    for line in stdout:
+        print(line, end='')
+
+    res = chan.recv_exit_status()
+
+    if res:
+        raise Exception('non-0 exit code preparing exec environment: %d'
+                        % res)
+
+
+def synchronize_hg(source_path: pathlib.Path, ec2_instance, revision: str=None):
+    """Synchronize a local Mercurial source path to remote EC2 instance."""
+
+    with tempfile.TemporaryDirectory() as temp_dir:
+        temp_dir = pathlib.Path(temp_dir)
+
+        ssh_dir = temp_dir / '.ssh'
+        ssh_dir.mkdir()
+        ssh_dir.chmod(0o0700)
+
+        public_ip = ec2_instance.public_ip_address
+
+        ssh_config = ssh_dir / 'config'
+
+        with ssh_config.open('w', encoding='utf-8') as fh:
+            fh.write('Host %s\n' % public_ip)
+            fh.write('  User hg\n')
+            fh.write('  StrictHostKeyChecking no\n')
+            fh.write('  UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
+            fh.write('  IdentityFile %s\n' % ec2_instance.ssh_private_key_path)
+
+        if not (source_path / '.hg').is_dir():
+            raise Exception('%s is not a Mercurial repository; synchronization '
+                            'not yet supported' % source_path)
+
+        env = dict(os.environ)
+        env['HGPLAIN'] = '1'
+        env['HGENCODING'] = 'utf-8'
+
+        hg_bin = source_path / 'hg'
+
+        res = subprocess.run(
+            ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
+            cwd=str(source_path), env=env, check=True, capture_output=True)
+
+        full_revision = res.stdout.decode('ascii')
+
+        args = [
+            'python2.7', str(hg_bin),
+            '--config', 'ui.ssh=ssh -F %s' % ssh_config,
+            '--config', 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
+            'push', '-f', '-r', full_revision,
+            'ssh://%s//hgwork/src' % public_ip,
+        ]
+
+        subprocess.run(args, cwd=str(source_path), env=env, check=True)
+
+        # TODO support synchronizing dirty working directory.
+
+        sftp = ec2_instance.ssh_client.open_sftp()
+
+        with sftp.open('/hgdev/hgup', 'wb') as fh:
+            fh.write(HG_UPDATE_CLEAN)
+            fh.chmod(0o0700)
+
+        chan, stdin, stdout = exec_command(
+            ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision)
+        stdin.close()
+
+        for line in stdout:
+            print(line, end='')
+
+        res = chan.recv_exit_status()
+
+        if res:
+            raise Exception('non-0 exit code updating working directory; %d'
+                            % res)
+
+
+def run_tests(ssh_client, python_version, test_flags=None):
+    """Run tests on a remote Linux machine via an SSH client."""
+    test_flags = test_flags or []
+
+    print('running tests')
+
+    if python_version == 'system2':
+        python = '/usr/bin/python2'
+    elif python_version == 'system3':
+        python = '/usr/bin/python3'
+    elif python_version.startswith('pypy'):
+        python = '/hgdev/pyenv/shims/%s' % python_version
+    else:
+        python = '/hgdev/pyenv/shims/python%s' % python_version
+
+    test_flags = ' '.join(shlex.quote(a) for a in test_flags)
+
+    command = (
+        '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
+        'cd /hgwork/src/tests && %s run-tests.py %s"' % (
+            python, test_flags))
+
+    chan, stdin, stdout = exec_command(ssh_client, command)
+
+    stdin.close()
+
+    for line in stdout:
+        print(line, end='')
+
+    return chan.recv_exit_status()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/automation/hgautomation/ssh.py	Mon May 20 11:40:47 2019 -0400
@@ -0,0 +1,67 @@
+# ssh.py - Interact with remote SSH servers
+#
+# Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+# no-check-code because Python 3 native.
+
+import socket
+import time
+import warnings
+
+from cryptography.utils import (
+    CryptographyDeprecationWarning,
+)
+import paramiko
+
+
+def wait_for_ssh(hostname, port, timeout=60, username=None, key_filename=None):
+    """Wait for an SSH server to start on the specified host and port."""
+    class IgnoreHostKeyPolicy(paramiko.MissingHostKeyPolicy):
+        def missing_host_key(self, client, hostname, key):
+            return
+
+    end_time = time.time() + timeout
+
+    # paramiko triggers a CryptographyDeprecationWarning in the cryptography
+    # package. Let's suppress it.
+    with warnings.catch_warnings():
+        warnings.filterwarnings('ignore',
+                                category=CryptographyDeprecationWarning)
+
+        while True:
+            client = paramiko.SSHClient()
+            client.set_missing_host_key_policy(IgnoreHostKeyPolicy())
+            try:
+                client.connect(hostname, port=port, username=username,
+                               key_filename=key_filename,
+                               timeout=5.0, allow_agent=False,
+                               look_for_keys=False)
+
+                return client
+            except socket.error:
+                pass
+            except paramiko.AuthenticationException:
+                raise
+            except paramiko.SSHException:
+                pass
+
+            if time.time() >= end_time:
+                raise Exception('Timeout reached waiting for SSH')
+
+            time.sleep(1.0)
+
+
+def exec_command(client, command):
+    """exec_command wrapper that combines stderr/stdout and returns channel"""
+    chan = client.get_transport().open_session()
+
+    chan.exec_command(command)
+    chan.set_combine_stderr(True)
+
+    stdin = chan.makefile('wb', -1)
+    stdout = chan.makefile('r', -1)
+
+    return chan, stdin, stdout
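+
+# Illustrative call pattern (this mirrors how aws.py and linux.py consume
+# exec_command()):
+#
+#     chan, stdin, stdout = exec_command(client, 'uname -a')
+#     stdin.close()
+#     for line in stdout:
+#         print(line, end='')
+#     if chan.recv_exit_status():
+#         raise Exception('command failed')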
--- a/contrib/automation/hgautomation/windows.py	Mon May 20 10:08:28 2019 +0200
+++ b/contrib/automation/hgautomation/windows.py	Mon May 20 11:40:47 2019 -0400
@@ -156,6 +156,10 @@
             fh.write('  UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
             fh.write('  IdentityFile %s\n' % (ssh_dir / 'id_rsa'))
 
+        if not (hg_repo / '.hg').is_dir():
+            raise Exception('%s is not a Mercurial repository; '
+                            'synchronization not yet supported' % hg_repo)
+
         env = dict(os.environ)
         env['HGPLAIN'] = '1'
         env['HGENCODING'] = 'utf-8'
@@ -172,7 +176,8 @@
             'python2.7', hg_bin,
             '--config', 'ui.ssh=ssh -F %s' % ssh_config,
             '--config', 'ui.remotecmd=c:/hgdev/venv-bootstrap/Scripts/hg.exe',
-            'push', '-r', full_revision, 'ssh://%s/c:/hgdev/src' % public_ip,
+            'push', '-f', '-r', full_revision,
+            'ssh://%s/c:/hgdev/src' % public_ip,
         ]
 
         subprocess.run(args, cwd=str(hg_repo), env=env, check=True)
--- a/contrib/automation/hgautomation/winrm.py	Mon May 20 10:08:28 2019 +0200
+++ b/contrib/automation/hgautomation/winrm.py	Mon May 20 11:40:47 2019 -0400
@@ -25,7 +25,7 @@
 logger = logging.getLogger(__name__)
 
 
-def wait_for_winrm(host, username, password, timeout=120, ssl=False):
+def wait_for_winrm(host, username, password, timeout=180, ssl=False):
     """Wait for the Windows Remoting (WinRM) service to become available.
 
     Returns a ``psrpclient.Client`` instance.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/automation/linux-requirements-py2.txt	Mon May 20 11:40:47 2019 -0400
@@ -0,0 +1,130 @@
+#
+# This file is autogenerated by pip-compile
+# To update, run:
+#
+#    pip-compile -U --generate-hashes --output-file contrib/automation/linux-requirements-py2.txt contrib/automation/linux-requirements.txt.in
+#
+astroid==1.6.6 \
+    --hash=sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756 \
+    --hash=sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7 \
+    # via pylint
+backports.functools-lru-cache==1.5 \
+    --hash=sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a \
+    --hash=sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd \
+    # via astroid, isort, pylint
+bzr==2.7.0 ; python_version <= "2.7" and platform_python_implementation == "CPython" \
+    --hash=sha256:c9f6bbe0a50201dadc5fddadd94ba50174193c6cf6e39e16f6dd0ad98a1df338
+configparser==3.7.4 \
+    --hash=sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32 \
+    --hash=sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75 \
+    # via pylint
+contextlib2==0.5.5 \
+    --hash=sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48 \
+    --hash=sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00 \
+    # via vcrpy
+docutils==0.14 \
+    --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \
+    --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \
+    --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6
+enum34==1.1.6 \
+    --hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
+    --hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
+    --hash=sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79 \
+    --hash=sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1 \
+    # via astroid
+funcsigs==1.0.2 \
+    --hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \
+    --hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50 \
+    # via mock
+futures==3.2.0 \
+    --hash=sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265 \
+    --hash=sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1 \
+    # via isort
+fuzzywuzzy==0.17.0 \
+    --hash=sha256:5ac7c0b3f4658d2743aa17da53a55598144edbc5bee3c6863840636e6926f254 \
+    --hash=sha256:6f49de47db00e1c71d40ad16da42284ac357936fa9b66bea1df63fed07122d62
+isort==4.3.17 \
+    --hash=sha256:01cb7e1ca5e6c5b3f235f0385057f70558b70d2f00320208825fa62887292f43 \
+    --hash=sha256:268067462aed7eb2a1e237fcb287852f22077de3fb07964e87e00f829eea2d1a \
+    # via pylint
+lazy-object-proxy==1.3.1 \
+    --hash=sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33 \
+    --hash=sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39 \
+    --hash=sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019 \
+    --hash=sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088 \
+    --hash=sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b \
+    --hash=sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e \
+    --hash=sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6 \
+    --hash=sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b \
+    --hash=sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5 \
+    --hash=sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff \
+    --hash=sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd \
+    --hash=sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7 \
+    --hash=sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff \
+    --hash=sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d \
+    --hash=sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2 \
+    --hash=sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35 \
+    --hash=sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4 \
+    --hash=sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514 \
+    --hash=sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252 \
+    --hash=sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109 \
+    --hash=sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f \
+    --hash=sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c \
+    --hash=sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92 \
+    --hash=sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577 \
+    --hash=sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d \
+    --hash=sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d \
+    --hash=sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f \
+    --hash=sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a \
+    --hash=sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b \
+    # via astroid
+mccabe==0.6.1 \
+    --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \
+    --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \
+    # via pylint
+mock==2.0.0 \
+    --hash=sha256:5ce3c71c5545b472da17b72268978914d0252980348636840bd34a00b5cc96c1 \
+    --hash=sha256:b158b6df76edd239b8208d481dc46b6afd45a846b7812ff0ce58971cf5bc8bba \
+    # via vcrpy
+pbr==5.1.3 \
+    --hash=sha256:8257baf496c8522437e8a6cfe0f15e00aedc6c0e0e7c9d55eeeeab31e0853843 \
+    --hash=sha256:8c361cc353d988e4f5b998555c88098b9d5964c2e11acf7b0d21925a66bb5824 \
+    # via mock
+pyflakes==2.1.1 \
+    --hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \
+    --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2
+pygments==2.3.1 \
+    --hash=sha256:5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a \
+    --hash=sha256:e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d
+pylint==1.9.4 \
+    --hash=sha256:02c2b6d268695a8b64ad61847f92e611e6afcff33fd26c3a2125370c4662905d \
+    --hash=sha256:ee1e85575587c5b58ddafa25e1c1b01691ef172e139fc25585e5d3f02451da93
+python-levenshtein==0.12.0 \
+    --hash=sha256:033a11de5e3d19ea25c9302d11224e1a1898fe5abd23c61c7c360c25195e3eb1
+pyyaml==5.1 \
+    --hash=sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c \
+    --hash=sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95 \
+    --hash=sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2 \
+    --hash=sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4 \
+    --hash=sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad \
+    --hash=sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba \
+    --hash=sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1 \
+    --hash=sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e \
+    --hash=sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673 \
+    --hash=sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13 \
+    --hash=sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19 \
+    # via vcrpy
+singledispatch==3.4.0.3 \
+    --hash=sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c \
+    --hash=sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8 \
+    # via astroid, pylint
+six==1.12.0 \
+    --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
+    --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
+    # via astroid, mock, pylint, singledispatch, vcrpy
+vcrpy==2.0.1 \
+    --hash=sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3 \
+    --hash=sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f
+wrapt==1.11.1 \
+    --hash=sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533 \
+    # via astroid, vcrpy
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/automation/linux-requirements-py3.txt	Mon May 20 11:40:47 2019 -0400
@@ -0,0 +1,159 @@
+#
+# This file is autogenerated by pip-compile
+# To update, run:
+#
+#    pip-compile -U --generate-hashes --output-file contrib/automation/linux-requirements-py3.txt contrib/automation/linux-requirements.txt.in
+#
+astroid==2.2.5 \
+    --hash=sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4 \
+    --hash=sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4 \
+    # via pylint
+docutils==0.14 \
+    --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \
+    --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \
+    --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6
+fuzzywuzzy==0.17.0 \
+    --hash=sha256:5ac7c0b3f4658d2743aa17da53a55598144edbc5bee3c6863840636e6926f254 \
+    --hash=sha256:6f49de47db00e1c71d40ad16da42284ac357936fa9b66bea1df63fed07122d62
+idna==2.8 \
+    --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
+    --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c \
+    # via yarl
+isort==4.3.17 \
+    --hash=sha256:01cb7e1ca5e6c5b3f235f0385057f70558b70d2f00320208825fa62887292f43 \
+    --hash=sha256:268067462aed7eb2a1e237fcb287852f22077de3fb07964e87e00f829eea2d1a \
+    # via pylint
+lazy-object-proxy==1.3.1 \
+    --hash=sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33 \
+    --hash=sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39 \
+    --hash=sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019 \
+    --hash=sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088 \
+    --hash=sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b \
+    --hash=sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e \
+    --hash=sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6 \
+    --hash=sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b \
+    --hash=sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5 \
+    --hash=sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff \
+    --hash=sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd \
+    --hash=sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7 \
+    --hash=sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff \
+    --hash=sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d \
+    --hash=sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2 \
+    --hash=sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35 \
+    --hash=sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4 \
+    --hash=sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514 \
+    --hash=sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252 \
+    --hash=sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109 \
+    --hash=sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f \
+    --hash=sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c \
+    --hash=sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92 \
+    --hash=sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577 \
+    --hash=sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d \
+    --hash=sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d \
+    --hash=sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f \
+    --hash=sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a \
+    --hash=sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b \
+    # via astroid
+mccabe==0.6.1 \
+    --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \
+    --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \
+    # via pylint
+multidict==4.5.2 \
+    --hash=sha256:024b8129695a952ebd93373e45b5d341dbb87c17ce49637b34000093f243dd4f \
+    --hash=sha256:041e9442b11409be5e4fc8b6a97e4bcead758ab1e11768d1e69160bdde18acc3 \
+    --hash=sha256:045b4dd0e5f6121e6f314d81759abd2c257db4634260abcfe0d3f7083c4908ef \
+    --hash=sha256:047c0a04e382ef8bd74b0de01407e8d8632d7d1b4db6f2561106af812a68741b \
+    --hash=sha256:068167c2d7bbeebd359665ac4fff756be5ffac9cda02375b5c5a7c4777038e73 \
+    --hash=sha256:148ff60e0fffa2f5fad2eb25aae7bef23d8f3b8bdaf947a65cdbe84a978092bc \
+    --hash=sha256:1d1c77013a259971a72ddaa83b9f42c80a93ff12df6a4723be99d858fa30bee3 \
+    --hash=sha256:1d48bc124a6b7a55006d97917f695effa9725d05abe8ee78fd60d6588b8344cd \
+    --hash=sha256:31dfa2fc323097f8ad7acd41aa38d7c614dd1960ac6681745b6da124093dc351 \
+    --hash=sha256:34f82db7f80c49f38b032c5abb605c458bac997a6c3142e0d6c130be6fb2b941 \
+    --hash=sha256:3d5dd8e5998fb4ace04789d1d008e2bb532de501218519d70bb672c4c5a2fc5d \
+    --hash=sha256:4a6ae52bd3ee41ee0f3acf4c60ceb3f44e0e3bc52ab7da1c2b2aa6703363a3d1 \
+    --hash=sha256:4b02a3b2a2f01d0490dd39321c74273fed0568568ea0e7ea23e02bd1fb10a10b \
+    --hash=sha256:4b843f8e1dd6a3195679d9838eb4670222e8b8d01bc36c9894d6c3538316fa0a \
+    --hash=sha256:5de53a28f40ef3c4fd57aeab6b590c2c663de87a5af76136ced519923d3efbb3 \
+    --hash=sha256:61b2b33ede821b94fa99ce0b09c9ece049c7067a33b279f343adfe35108a4ea7 \
+    --hash=sha256:6a3a9b0f45fd75dc05d8e93dc21b18fc1670135ec9544d1ad4acbcf6b86781d0 \
+    --hash=sha256:76ad8e4c69dadbb31bad17c16baee61c0d1a4a73bed2590b741b2e1a46d3edd0 \
+    --hash=sha256:7ba19b777dc00194d1b473180d4ca89a054dd18de27d0ee2e42a103ec9b7d014 \
+    --hash=sha256:7c1b7eab7a49aa96f3db1f716f0113a8a2e93c7375dd3d5d21c4941f1405c9c5 \
+    --hash=sha256:7fc0eee3046041387cbace9314926aa48b681202f8897f8bff3809967a049036 \
+    --hash=sha256:8ccd1c5fff1aa1427100ce188557fc31f1e0a383ad8ec42c559aabd4ff08802d \
+    --hash=sha256:8e08dd76de80539d613654915a2f5196dbccc67448df291e69a88712ea21e24a \
+    --hash=sha256:c18498c50c59263841862ea0501da9f2b3659c00db54abfbf823a80787fde8ce \
+    --hash=sha256:c49db89d602c24928e68c0d510f4fcf8989d77defd01c973d6cbe27e684833b1 \
+    --hash=sha256:ce20044d0317649ddbb4e54dab3c1bcc7483c78c27d3f58ab3d0c7e6bc60d26a \
+    --hash=sha256:d1071414dd06ca2eafa90c85a079169bfeb0e5f57fd0b45d44c092546fcd6fd9 \
+    --hash=sha256:d3be11ac43ab1a3e979dac80843b42226d5d3cccd3986f2e03152720a4297cd7 \
+    --hash=sha256:db603a1c235d110c860d5f39988ebc8218ee028f07a7cbc056ba6424372ca31b \
+    # via yarl
+pyflakes==2.1.1 \
+    --hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \
+    --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2
+pygments==2.3.1 \
+    --hash=sha256:5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a \
+    --hash=sha256:e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d
+pylint==2.3.1 \
+    --hash=sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09 \
+    --hash=sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1
+python-levenshtein==0.12.0 \
+    --hash=sha256:033a11de5e3d19ea25c9302d11224e1a1898fe5abd23c61c7c360c25195e3eb1
+pyyaml==5.1 \
+    --hash=sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c \
+    --hash=sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95 \
+    --hash=sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2 \
+    --hash=sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4 \
+    --hash=sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad \
+    --hash=sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba \
+    --hash=sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1 \
+    --hash=sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e \
+    --hash=sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673 \
+    --hash=sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13 \
+    --hash=sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19 \
+    # via vcrpy
+six==1.12.0 \
+    --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
+    --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
+    # via astroid, vcrpy
+typed-ast==1.3.4 ; python_version >= "3.0" and platform_python_implementation != "PyPy" \
+    --hash=sha256:04894d268ba6eab7e093d43107869ad49e7b5ef40d1a94243ea49b352061b200 \
+    --hash=sha256:16616ece19daddc586e499a3d2f560302c11f122b9c692bc216e821ae32aa0d0 \
+    --hash=sha256:252fdae740964b2d3cdfb3f84dcb4d6247a48a6abe2579e8029ab3be3cdc026c \
+    --hash=sha256:2af80a373af123d0b9f44941a46df67ef0ff7a60f95872412a145f4500a7fc99 \
+    --hash=sha256:2c88d0a913229a06282b285f42a31e063c3bf9071ff65c5ea4c12acb6977c6a7 \
+    --hash=sha256:2ea99c029ebd4b5a308d915cc7fb95b8e1201d60b065450d5d26deb65d3f2bc1 \
+    --hash=sha256:3d2e3ab175fc097d2a51c7a0d3fda442f35ebcc93bb1d7bd9b95ad893e44c04d \
+    --hash=sha256:4766dd695548a15ee766927bf883fb90c6ac8321be5a60c141f18628fb7f8da8 \
+    --hash=sha256:56b6978798502ef66625a2e0f80cf923da64e328da8bbe16c1ff928c70c873de \
+    --hash=sha256:5cddb6f8bce14325b2863f9d5ac5c51e07b71b462361fd815d1d7706d3a9d682 \
+    --hash=sha256:644ee788222d81555af543b70a1098f2025db38eaa99226f3a75a6854924d4db \
+    --hash=sha256:64cf762049fc4775efe6b27161467e76d0ba145862802a65eefc8879086fc6f8 \
+    --hash=sha256:68c362848d9fb71d3c3e5f43c09974a0ae319144634e7a47db62f0f2a54a7fa7 \
+    --hash=sha256:6c1f3c6f6635e611d58e467bf4371883568f0de9ccc4606f17048142dec14a1f \
+    --hash=sha256:b213d4a02eec4ddf622f4d2fbc539f062af3788d1f332f028a2e19c42da53f15 \
+    --hash=sha256:bb27d4e7805a7de0e35bd0cb1411bc85f807968b2b0539597a49a23b00a622ae \
+    --hash=sha256:c9d414512eaa417aadae7758bc118868cd2396b0e6138c1dd4fda96679c079d3 \
+    --hash=sha256:f0937165d1e25477b01081c4763d2d9cdc3b18af69cb259dd4f640c9b900fe5e \
+    --hash=sha256:fb96a6e2c11059ecf84e6741a319f93f683e440e341d4489c9b161eca251cf2a \
+    --hash=sha256:fc71d2d6ae56a091a8d94f33ec9d0f2001d1cb1db423d8b4355debfe9ce689b7
+vcrpy==2.0.1 \
+    --hash=sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3 \
+    --hash=sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f
+wrapt==1.11.1 \
+    --hash=sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533 \
+    # via astroid, vcrpy
+yarl==1.3.0 \
+    --hash=sha256:024ecdc12bc02b321bc66b41327f930d1c2c543fa9a561b39861da9388ba7aa9 \
+    --hash=sha256:2f3010703295fbe1aec51023740871e64bb9664c789cba5a6bdf404e93f7568f \
+    --hash=sha256:3890ab952d508523ef4881457c4099056546593fa05e93da84c7250516e632eb \
+    --hash=sha256:3e2724eb9af5dc41648e5bb304fcf4891adc33258c6e14e2a7414ea32541e320 \
+    --hash=sha256:5badb97dd0abf26623a9982cd448ff12cb39b8e4c94032ccdedf22ce01a64842 \
+    --hash=sha256:73f447d11b530d860ca1e6b582f947688286ad16ca42256413083d13f260b7a0 \
+    --hash=sha256:7ab825726f2940c16d92aaec7d204cfc34ac26c0040da727cf8ba87255a33829 \
+    --hash=sha256:b25de84a8c20540531526dfbb0e2d2b648c13fd5dd126728c496d7c3fea33310 \
+    --hash=sha256:c6e341f5a6562af74ba55205dbd56d248daf1b5748ec48a0200ba227bb9e33f4 \
+    --hash=sha256:c9bb7c249c4432cd47e75af3864bc02d26c9594f49c82e2a28624417f0ae63b8 \
+    --hash=sha256:e060906c0c585565c718d1c3841747b61c5439af2211e185f6739a9412dfbde1 \
+    # via vcrpy
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/automation/linux-requirements.txt.in	Mon May 20 11:40:47 2019 -0400
@@ -0,0 +1,12 @@
+# Bazaar doesn't work with Python 3 or PyPy.
+bzr ; python_version <= '2.7' and platform_python_implementation == 'CPython'
+docutils
+fuzzywuzzy
+pyflakes
+pygments
+pylint
+# Needed to avoid warnings from fuzzywuzzy.
+python-Levenshtein
+# typed-ast dependency doesn't install on PyPy.
+typed-ast ; python_version >= '3.0' and platform_python_implementation != 'PyPy'
+vcrpy
--- a/contrib/automation/requirements.txt	Mon May 20 10:08:28 2019 +0200
+++ b/contrib/automation/requirements.txt	Mon May 20 11:40:47 2019 -0400
@@ -8,47 +8,68 @@
     --hash=sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87 \
     --hash=sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49 \
     # via cryptography
-boto3==1.9.111 \
-    --hash=sha256:06414c75d1f62af7d04fd652b38d1e4fd3cfd6b35bad978466af88e2aaecd00d \
-    --hash=sha256:f3b77dff382374773d02411fa47ee408f4f503aeebd837fd9dc9ed8635bc5e8e
-botocore==1.12.111 \
-    --hash=sha256:6af473c52d5e3e7ff82de5334e9fee96b2d5ec2df5d78bc00cd9937e2573a7a8 \
-    --hash=sha256:9f5123c7be704b17aeacae99b5842ab17bda1f799dd29134de8c70e0a50a45d7 \
+bcrypt==3.1.6 \
+    --hash=sha256:0ba875eb67b011add6d8c5b76afbd92166e98b1f1efab9433d5dc0fafc76e203 \
+    --hash=sha256:21ed446054c93e209434148ef0b362432bb82bbdaf7beef70a32c221f3e33d1c \
+    --hash=sha256:28a0459381a8021f57230954b9e9a65bb5e3d569d2c253c5cac6cb181d71cf23 \
+    --hash=sha256:2aed3091eb6f51c26b7c2fad08d6620d1c35839e7a362f706015b41bd991125e \
+    --hash=sha256:2fa5d1e438958ea90eaedbf8082c2ceb1a684b4f6c75a3800c6ec1e18ebef96f \
+    --hash=sha256:3a73f45484e9874252002793518da060fb11eaa76c30713faa12115db17d1430 \
+    --hash=sha256:3e489787638a36bb466cd66780e15715494b6d6905ffdbaede94440d6d8e7dba \
+    --hash=sha256:44636759d222baa62806bbceb20e96f75a015a6381690d1bc2eda91c01ec02ea \
+    --hash=sha256:678c21b2fecaa72a1eded0cf12351b153615520637efcadc09ecf81b871f1596 \
+    --hash=sha256:75460c2c3786977ea9768d6c9d8957ba31b5fbeb0aae67a5c0e96aab4155f18c \
+    --hash=sha256:8ac06fb3e6aacb0a95b56eba735c0b64df49651c6ceb1ad1cf01ba75070d567f \
+    --hash=sha256:8fdced50a8b646fff8fa0e4b1c5fd940ecc844b43d1da5a980cb07f2d1b1132f \
+    --hash=sha256:9b2c5b640a2da533b0ab5f148d87fb9989bf9bcb2e61eea6a729102a6d36aef9 \
+    --hash=sha256:a9083e7fa9adb1a4de5ac15f9097eb15b04e2c8f97618f1b881af40abce382e1 \
+    --hash=sha256:b7e3948b8b1a81c5a99d41da5fb2dc03ddb93b5f96fcd3fd27e643f91efa33e1 \
+    --hash=sha256:b998b8ca979d906085f6a5d84f7b5459e5e94a13fc27c28a3514437013b6c2f6 \
+    --hash=sha256:dd08c50bc6f7be69cd7ba0769acca28c846ec46b7a8ddc2acf4b9ac6f8a7457e \
+    --hash=sha256:de5badee458544ab8125e63e39afeedfcf3aef6a6e2282ac159c95ae7472d773 \
+    --hash=sha256:ede2a87333d24f55a4a7338a6ccdccf3eaa9bed081d1737e0db4dbd1a4f7e6b6 \
+    # via paramiko
+boto3==1.9.137 \
+    --hash=sha256:882cc4869b47b51dae4b4a900769e72171ff00e0b6bca644b2d7a7ad7378f324 \
+    --hash=sha256:cd503a7e7a04f1c14d2801f9727159dfa88c393b4004e98940fa4aa205d920c8
+botocore==1.12.137 \
+    --hash=sha256:0d95794f6b1239c75e2c5f966221bcd4b68020fddb5676f757531eedbb612ed8 \
+    --hash=sha256:3213cf48cf2ceee10fc3b93221f2cd1c38521cca7584f547d5c086213cc60f35 \
     # via boto3, s3transfer
 certifi==2019.3.9 \
     --hash=sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5 \
     --hash=sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae \
     # via requests
-cffi==1.12.2 \
-    --hash=sha256:00b97afa72c233495560a0793cdc86c2571721b4271c0667addc83c417f3d90f \
-    --hash=sha256:0ba1b0c90f2124459f6966a10c03794082a2f3985cd699d7d63c4a8dae113e11 \
-    --hash=sha256:0bffb69da295a4fc3349f2ec7cbe16b8ba057b0a593a92cbe8396e535244ee9d \
-    --hash=sha256:21469a2b1082088d11ccd79dd84157ba42d940064abbfa59cf5f024c19cf4891 \
-    --hash=sha256:2e4812f7fa984bf1ab253a40f1f4391b604f7fc424a3e21f7de542a7f8f7aedf \
-    --hash=sha256:2eac2cdd07b9049dd4e68449b90d3ef1adc7c759463af5beb53a84f1db62e36c \
-    --hash=sha256:2f9089979d7456c74d21303c7851f158833d48fb265876923edcb2d0194104ed \
-    --hash=sha256:3dd13feff00bddb0bd2d650cdb7338f815c1789a91a6f68fdc00e5c5ed40329b \
-    --hash=sha256:4065c32b52f4b142f417af6f33a5024edc1336aa845b9d5a8d86071f6fcaac5a \
-    --hash=sha256:51a4ba1256e9003a3acf508e3b4f4661bebd015b8180cc31849da222426ef585 \
-    --hash=sha256:59888faac06403767c0cf8cfb3f4a777b2939b1fbd9f729299b5384f097f05ea \
-    --hash=sha256:59c87886640574d8b14910840327f5cd15954e26ed0bbd4e7cef95fa5aef218f \
-    --hash=sha256:610fc7d6db6c56a244c2701575f6851461753c60f73f2de89c79bbf1cc807f33 \
-    --hash=sha256:70aeadeecb281ea901bf4230c6222af0248c41044d6f57401a614ea59d96d145 \
-    --hash=sha256:71e1296d5e66c59cd2c0f2d72dc476d42afe02aeddc833d8e05630a0551dad7a \
-    --hash=sha256:8fc7a49b440ea752cfdf1d51a586fd08d395ff7a5d555dc69e84b1939f7ddee3 \
-    --hash=sha256:9b5c2afd2d6e3771d516045a6cfa11a8da9a60e3d128746a7fe9ab36dfe7221f \
-    --hash=sha256:9c759051ebcb244d9d55ee791259ddd158188d15adee3c152502d3b69005e6bd \
-    --hash=sha256:b4d1011fec5ec12aa7cc10c05a2f2f12dfa0adfe958e56ae38dc140614035804 \
-    --hash=sha256:b4f1d6332339ecc61275bebd1f7b674098a66fea11a00c84d1c58851e618dc0d \
-    --hash=sha256:c030cda3dc8e62b814831faa4eb93dd9a46498af8cd1d5c178c2de856972fd92 \
-    --hash=sha256:c2e1f2012e56d61390c0e668c20c4fb0ae667c44d6f6a2eeea5d7148dcd3df9f \
-    --hash=sha256:c37c77d6562074452120fc6c02ad86ec928f5710fbc435a181d69334b4de1d84 \
-    --hash=sha256:c8149780c60f8fd02752d0429246088c6c04e234b895c4a42e1ea9b4de8d27fb \
-    --hash=sha256:cbeeef1dc3c4299bd746b774f019de9e4672f7cc666c777cd5b409f0b746dac7 \
-    --hash=sha256:e113878a446c6228669144ae8a56e268c91b7f1fafae927adc4879d9849e0ea7 \
-    --hash=sha256:e21162bf941b85c0cda08224dade5def9360f53b09f9f259adb85fc7dd0e7b35 \
-    --hash=sha256:fb6934ef4744becbda3143d30c6604718871495a5e36c408431bf33d9c146889 \
-    # via cryptography
+cffi==1.12.3 \
+    --hash=sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774 \
+    --hash=sha256:046ef9a22f5d3eed06334d01b1e836977eeef500d9b78e9ef693f9380ad0b83d \
+    --hash=sha256:066bc4c7895c91812eff46f4b1c285220947d4aa46fa0a2651ff85f2afae9c90 \
+    --hash=sha256:066c7ff148ae33040c01058662d6752fd73fbc8e64787229ea8498c7d7f4041b \
+    --hash=sha256:2444d0c61f03dcd26dbf7600cf64354376ee579acad77aef459e34efcb438c63 \
+    --hash=sha256:300832850b8f7967e278870c5d51e3819b9aad8f0a2c8dbe39ab11f119237f45 \
+    --hash=sha256:34c77afe85b6b9e967bd8154e3855e847b70ca42043db6ad17f26899a3df1b25 \
+    --hash=sha256:46de5fa00f7ac09f020729148ff632819649b3e05a007d286242c4882f7b1dc3 \
+    --hash=sha256:4aa8ee7ba27c472d429b980c51e714a24f47ca296d53f4d7868075b175866f4b \
+    --hash=sha256:4d0004eb4351e35ed950c14c11e734182591465a33e960a4ab5e8d4f04d72647 \
+    --hash=sha256:4e3d3f31a1e202b0f5a35ba3bc4eb41e2fc2b11c1eff38b362de710bcffb5016 \
+    --hash=sha256:50bec6d35e6b1aaeb17f7c4e2b9374ebf95a8975d57863546fa83e8d31bdb8c4 \
+    --hash=sha256:55cad9a6df1e2a1d62063f79d0881a414a906a6962bc160ac968cc03ed3efcfb \
+    --hash=sha256:5662ad4e4e84f1eaa8efce5da695c5d2e229c563f9d5ce5b0113f71321bcf753 \
+    --hash=sha256:59b4dc008f98fc6ee2bb4fd7fc786a8d70000d058c2bbe2698275bc53a8d3fa7 \
+    --hash=sha256:73e1ffefe05e4ccd7bcea61af76f36077b914f92b76f95ccf00b0c1b9186f3f9 \
+    --hash=sha256:a1f0fd46eba2d71ce1589f7e50a9e2ffaeb739fb2c11e8192aa2b45d5f6cc41f \
+    --hash=sha256:a2e85dc204556657661051ff4bab75a84e968669765c8a2cd425918699c3d0e8 \
+    --hash=sha256:a5457d47dfff24882a21492e5815f891c0ca35fefae8aa742c6c263dac16ef1f \
+    --hash=sha256:a8dccd61d52a8dae4a825cdbb7735da530179fea472903eb871a5513b5abbfdc \
+    --hash=sha256:ae61af521ed676cf16ae94f30fe202781a38d7178b6b4ab622e4eec8cefaff42 \
+    --hash=sha256:b012a5edb48288f77a63dba0840c92d0504aa215612da4541b7b42d849bc83a3 \
+    --hash=sha256:d2c5cfa536227f57f97c92ac30c8109688ace8fa4ac086d19d0af47d134e2909 \
+    --hash=sha256:d42b5796e20aacc9d15e66befb7a345454eef794fdb0737d1af593447c6c8f45 \
+    --hash=sha256:dee54f5d30d775f525894d67b1495625dd9322945e7fee00731952e0368ff42d \
+    --hash=sha256:e070535507bd6aa07124258171be2ee8dfc19119c28ca94c9dfb7efd23564512 \
+    --hash=sha256:e1ff2748c84d97b065cc95429814cdba39bcbd77c9c85c89344b317dc0d9cbff \
+    --hash=sha256:ed851c75d1e0e043cbf5ca9a8e1b13c4c90f3fbd863dacb01c0808e2b5204201 \
+    # via bcrypt, cryptography, pynacl
 chardet==3.0.4 \
     --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
     --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \
@@ -73,7 +94,7 @@
     --hash=sha256:d4afbb0840f489b60f5a580a41a1b9c3622e08ecb5eec8614d4fb4cd914c4460 \
     --hash=sha256:d9ed28030797c00f4bc43c86bf819266c76a5ea61d006cd4078a93ebf7da6bfd \
     --hash=sha256:e603aa7bb52e4e8ed4119a58a03b60323918467ef209e6ff9db3ac382e5cf2c6 \
-    # via pypsrp
+    # via paramiko, pypsrp
 docutils==0.14 \
     --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \
     --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \
@@ -87,13 +108,41 @@
     --hash=sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6 \
     --hash=sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c \
     # via boto3, botocore
-ntlm-auth==1.2.0 \
-    --hash=sha256:7bc02a3fbdfee7275d3dc20fce8028ed8eb6d32364637f28be9e9ae9160c6d5c \
-    --hash=sha256:9b13eaf88f16a831637d75236a93d60c0049536715aafbf8190ba58a590b023e \
+ntlm-auth==1.3.0 \
+    --hash=sha256:bb2fd03c665f0f62c5f65695b62dcdb07fb7a45df6ebc86c770be2054d6902dd \
+    --hash=sha256:ce5b4483ed761f341a538a426a71a52e5a9cf5fd834ebef1d2090f9eef14b3f8 \
     # via pypsrp
+paramiko==2.4.2 \
+    --hash=sha256:3c16b2bfb4c0d810b24c40155dbfd113c0521e7e6ee593d704e84b4c658a1f3b \
+    --hash=sha256:a8975a7df3560c9f1e2b43dc54ebd40fd00a7017392ca5445ce7df409f900fcb
+pyasn1==0.4.5 \
+    --hash=sha256:da2420fe13a9452d8ae97a0e478adde1dee153b11ba832a95b223a2ba01c10f7 \
+    --hash=sha256:da6b43a8c9ae93bc80e2739efb38cc776ba74a886e3e9318d65fe81a8b8a2c6e \
+    # via paramiko
 pycparser==2.19 \
     --hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3 \
     # via cffi
+pynacl==1.3.0 \
+    --hash=sha256:05c26f93964373fc0abe332676cb6735f0ecad27711035b9472751faa8521255 \
+    --hash=sha256:0c6100edd16fefd1557da078c7a31e7b7d7a52ce39fdca2bec29d4f7b6e7600c \
+    --hash=sha256:0d0a8171a68edf51add1e73d2159c4bc19fc0718e79dec51166e940856c2f28e \
+    --hash=sha256:1c780712b206317a746ace34c209b8c29dbfd841dfbc02aa27f2084dd3db77ae \
+    --hash=sha256:2424c8b9f41aa65bbdbd7a64e73a7450ebb4aa9ddedc6a081e7afcc4c97f7621 \
+    --hash=sha256:2d23c04e8d709444220557ae48ed01f3f1086439f12dbf11976e849a4926db56 \
+    --hash=sha256:30f36a9c70450c7878053fa1344aca0145fd47d845270b43a7ee9192a051bf39 \
+    --hash=sha256:37aa336a317209f1bb099ad177fef0da45be36a2aa664507c5d72015f956c310 \
+    --hash=sha256:4943decfc5b905748f0756fdd99d4f9498d7064815c4cf3643820c9028b711d1 \
+    --hash=sha256:57ef38a65056e7800859e5ba9e6091053cd06e1038983016effaffe0efcd594a \
+    --hash=sha256:5bd61e9b44c543016ce1f6aef48606280e45f892a928ca7068fba30021e9b786 \
+    --hash=sha256:6482d3017a0c0327a49dddc8bd1074cc730d45db2ccb09c3bac1f8f32d1eb61b \
+    --hash=sha256:7d3ce02c0784b7cbcc771a2da6ea51f87e8716004512493a2b69016326301c3b \
+    --hash=sha256:a14e499c0f5955dcc3991f785f3f8e2130ed504fa3a7f44009ff458ad6bdd17f \
+    --hash=sha256:a39f54ccbcd2757d1d63b0ec00a00980c0b382c62865b61a505163943624ab20 \
+    --hash=sha256:aabb0c5232910a20eec8563503c153a8e78bbf5459490c49ab31f6adf3f3a415 \
+    --hash=sha256:bd4ecb473a96ad0f90c20acba4f0bf0df91a4e03a1f4dd6a4bdc9ca75aa3a715 \
+    --hash=sha256:e2da3c13307eac601f3de04887624939aca8ee3c9488a0bb0eca4fb9401fc6b1 \
+    --hash=sha256:f67814c38162f4deb31f68d590771a29d5ae3b1bd64b75cf232308e5c74777e0 \
+    # via paramiko
 pypsrp==0.3.1 \
     --hash=sha256:309853380fe086090a03cc6662a778ee69b1cae355ae4a932859034fd76e9d0b \
     --hash=sha256:90f946254f547dc3493cea8493c819ab87e152a755797c93aa2668678ba8ae85
@@ -112,8 +161,8 @@
 six==1.12.0 \
     --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
     --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
-    # via cryptography, pypsrp, python-dateutil
-urllib3==1.24.1 \
-    --hash=sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39 \
-    --hash=sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22 \
+    # via bcrypt, cryptography, pynacl, pypsrp, python-dateutil
+urllib3==1.24.2 \
+    --hash=sha256:4c291ca23bbb55c76518905869ef34bdd5f0e46af7afe6861e8375643ffee1a0 \
+    --hash=sha256:9a247273df709c4fedb38c711e44292304f73f39ab01beda9f6b9fc375669ac3 \
     # via botocore, requests
--- a/contrib/automation/requirements.txt.in	Mon May 20 10:08:28 2019 +0200
+++ b/contrib/automation/requirements.txt.in	Mon May 20 11:40:47 2019 -0400
@@ -1,2 +1,3 @@
 boto3
+paramiko
 pypsrp
--- a/contrib/byteify-strings.py	Mon May 20 10:08:28 2019 +0200
+++ b/contrib/byteify-strings.py	Mon May 20 11:40:47 2019 -0400
@@ -7,7 +7,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
 
 import argparse
 import contextlib
@@ -227,4 +227,7 @@
                 process(fin, fout, opts)
 
 if __name__ == '__main__':
+    if sys.version_info.major < 3:
+        print('This script must be run under Python 3.')
+        sys.exit(3)
     main()
--- a/contrib/python3-whitelist	Mon May 20 10:08:28 2019 +0200
+++ b/contrib/python3-whitelist	Mon May 20 11:40:47 2019 -0400
@@ -4,6 +4,7 @@
 test-absorb-phase.t
 test-absorb-rename.t
 test-absorb-strip.t
+test-absorb-unfinished.t
 test-absorb.t
 test-acl.t
 test-add.t
@@ -126,6 +127,7 @@
 test-convert-svn-sink.t
 test-convert-tagsbranch-topology.t
 test-convert.t
+test-copies-in-changeset.t
 test-copies.t
 test-copy-move-merge.t
 test-copy.t
@@ -139,6 +141,7 @@
 test-debugrename.t
 test-default-push.t
 test-demandimport.py
+test-devel-warnings.t
 test-diff-antipatience.t
 test-diff-binary-file.t
 test-diff-change.t
@@ -597,6 +600,7 @@
 test-releasenotes-merging.t
 test-releasenotes-parsing.t
 test-relink.t
+test-remote-hidden.t
 test-remotefilelog-bad-configs.t
 test-remotefilelog-bgprefetch.t
 test-remotefilelog-blame.t
@@ -658,6 +662,7 @@
 test-run-tests.py
 test-run-tests.t
 test-rust-ancestor.py
+test-rust-discovery.py
 test-schemes.t
 test-serve.t
 test-server-view.t
--- a/contrib/testparseutil.py	Mon May 20 10:08:28 2019 +0200
+++ b/contrib/testparseutil.py	Mon May 20 11:40:47 2019 -0400
@@ -54,7 +54,7 @@
         return s.decode(u'latin-1')
 
     def opentext(f):
-        return open(f, 'rb')
+        return open(f, 'r')
 else:
     stdin = sys.stdin
     stdout = sys.stdout
@@ -164,14 +164,14 @@
     ...         self.matchfunc = matchfunc
     ...     def startsat(self, line):
     ...         return self.matchfunc(line)
-    >>> ambig1 = ambigmatcher(b'ambiguous #1',
-    ...                       lambda l: l.startswith(b'  $ cat '))
-    >>> ambig2 = ambigmatcher(b'ambiguous #2',
-    ...                       lambda l: l.endswith(b'<< EOF\\n'))
-    >>> lines = [b'  $ cat > foo.py << EOF\\n']
+    >>> ambig1 = ambigmatcher('ambiguous #1',
+    ...                       lambda l: l.startswith('  $ cat '))
+    >>> ambig2 = ambigmatcher('ambiguous #2',
+    ...                       lambda l: l.endswith('<< EOF\\n'))
+    >>> lines = ['  $ cat > foo.py << EOF\\n']
     >>> errors = []
     >>> matchers = [ambig1, ambig2]
-    >>> list(t for t in embedded(b'<dummy>', lines, errors, matchers))
+    >>> list(t for t in embedded('<dummy>', lines, errors, matchers))
     []
     >>> b2s(errors)
     ['<dummy>:1: ambiguous line for "ambiguous #1", "ambiguous #2"']
@@ -181,21 +181,21 @@
     ctx = filename = code = startline = None # for pyflakes
 
     for lineno, line in enumerate(lines, 1):
-        if not line.endswith(b'\n'):
-            line += b'\n' # to normalize EOF line
+        if not line.endswith('\n'):
+            line += '\n' # to normalize EOF line
         if matcher: # now, inside embedded code
             if matcher.endsat(ctx, line):
                 codeatend = matcher.codeatend(ctx, line)
                 if codeatend is not None:
                     code.append(codeatend)
                 if not matcher.ignores(ctx):
-                    yield (filename, startline, lineno, b''.join(code))
+                    yield (filename, startline, lineno, ''.join(code))
                 matcher = None
                 # DO NOT "continue", because line might start next fragment
             elif not matcher.isinside(ctx, line):
                 # this is an error of basefile
                 # (if matchers are implemented correctly)
-                errors.append(b'%s:%d: unexpected line for "%s"'
+                errors.append('%s:%d: unexpected line for "%s"'
                               % (basefile, lineno, matcher.desc))
                 # stop extracting embedded code by current 'matcher',
                 # because appearance of unexpected line might mean
@@ -218,9 +218,9 @@
         if matched:
             if len(matched) > 1:
                 # this is an error of matchers, maybe
-                errors.append(b'%s:%d: ambiguous line for %s' %
+                errors.append('%s:%d: ambiguous line for %s' %
                               (basefile, lineno,
-                               b', '.join([b'"%s"' % m.desc
+                               ', '.join(['"%s"' % m.desc
                                            for m, c in matched])))
                 # omit extracting embedded code, because choosing
                 # arbitrary matcher from matched ones might fail to
@@ -239,20 +239,20 @@
     if matcher:
         # examine whether EOF ends embedded code, because embedded
         # code isn't yet ended explicitly
-        if matcher.endsat(ctx, b'\n'):
-            codeatend = matcher.codeatend(ctx, b'\n')
+        if matcher.endsat(ctx, '\n'):
+            codeatend = matcher.codeatend(ctx, '\n')
             if codeatend is not None:
                 code.append(codeatend)
             if not matcher.ignores(ctx):
-                yield (filename, startline, lineno + 1, b''.join(code))
+                yield (filename, startline, lineno + 1, ''.join(code))
         else:
             # this is an error of basefile
             # (if matchers are implemented correctly)
-            errors.append(b'%s:%d: unexpected end of file for "%s"'
+            errors.append('%s:%d: unexpected end of file for "%s"'
                           % (basefile, lineno, matcher.desc))
 
 # heredoc limit mark to ignore embedded code at check-code.py or so
-heredocignorelimit = b'NO_CHECK_EOF'
+heredocignorelimit = 'NO_CHECK_EOF'
 
 # the pattern to match against cases below, and to return a limit mark
 # string as 'lname' group
@@ -260,47 +260,47 @@
 # - << LIMITMARK
 # - << "LIMITMARK"
 # - << 'LIMITMARK'
-heredoclimitpat = br'\s*<<\s*(?P<lquote>["\']?)(?P<limit>\w+)(?P=lquote)'
+heredoclimitpat = r'\s*<<\s*(?P<lquote>["\']?)(?P<limit>\w+)(?P=lquote)'
 
 class fileheredocmatcher(embeddedmatcher):
     """Detect "cat > FILE << LIMIT" style embedded code
 
     >>> matcher = fileheredocmatcher(b'heredoc .py file', br'[^<]+\\.py')
-    >>> b2s(matcher.startsat(b'  $ cat > file.py << EOF\\n'))
+    >>> b2s(matcher.startsat('  $ cat > file.py << EOF\\n'))
     ('file.py', '  > EOF\\n')
-    >>> b2s(matcher.startsat(b'  $ cat   >>file.py   <<EOF\\n'))
+    >>> b2s(matcher.startsat('  $ cat   >>file.py   <<EOF\\n'))
     ('file.py', '  > EOF\\n')
-    >>> b2s(matcher.startsat(b'  $ cat>  \\x27any file.py\\x27<<  "EOF"\\n'))
+    >>> b2s(matcher.startsat('  $ cat>  \\x27any file.py\\x27<<  "EOF"\\n'))
     ('any file.py', '  > EOF\\n')
-    >>> b2s(matcher.startsat(b"  $ cat > file.py << 'ANYLIMIT'\\n"))
+    >>> b2s(matcher.startsat("  $ cat > file.py << 'ANYLIMIT'\\n"))
     ('file.py', '  > ANYLIMIT\\n')
-    >>> b2s(matcher.startsat(b'  $ cat<<ANYLIMIT>"file.py"\\n'))
+    >>> b2s(matcher.startsat('  $ cat<<ANYLIMIT>"file.py"\\n'))
     ('file.py', '  > ANYLIMIT\\n')
-    >>> start = b'  $ cat > file.py << EOF\\n'
+    >>> start = '  $ cat > file.py << EOF\\n'
     >>> ctx = matcher.startsat(start)
     >>> matcher.codeatstart(ctx, start)
     >>> b2s(matcher.filename(ctx))
     'file.py'
     >>> matcher.ignores(ctx)
     False
-    >>> inside = b'  > foo = 1\\n'
+    >>> inside = '  > foo = 1\\n'
     >>> matcher.endsat(ctx, inside)
     False
     >>> matcher.isinside(ctx, inside)
     True
     >>> b2s(matcher.codeinside(ctx, inside))
     'foo = 1\\n'
-    >>> end = b'  > EOF\\n'
+    >>> end = '  > EOF\\n'
     >>> matcher.endsat(ctx, end)
     True
     >>> matcher.codeatend(ctx, end)
-    >>> matcher.endsat(ctx, b'  > EOFEOF\\n')
+    >>> matcher.endsat(ctx, '  > EOFEOF\\n')
     False
-    >>> ctx = matcher.startsat(b'  $ cat > file.py << NO_CHECK_EOF\\n')
+    >>> ctx = matcher.startsat('  $ cat > file.py << NO_CHECK_EOF\\n')
     >>> matcher.ignores(ctx)
     True
     """
-    _prefix = b'  > '
+    _prefix = '  > '
 
     def __init__(self, desc, namepat):
         super(fileheredocmatcher, self).__init__(desc)
@@ -312,13 +312,13 @@
         # - > NAMEPAT
         # - > "NAMEPAT"
         # - > 'NAMEPAT'
-        namepat = (br'\s*>>?\s*(?P<nquote>["\']?)(?P<name>%s)(?P=nquote)'
+        namepat = (r'\s*>>?\s*(?P<nquote>["\']?)(?P<name>%s)(?P=nquote)'
                    % namepat)
         self._fileres = [
             # "cat > NAME << LIMIT" case
-            re.compile(br'  \$ \s*cat' + namepat + heredoclimitpat),
+            re.compile(r'  \$ \s*cat' + namepat + heredoclimitpat),
             # "cat << LIMIT > NAME" case
-            re.compile(br'  \$ \s*cat' + heredoclimitpat + namepat),
+            re.compile(r'  \$ \s*cat' + heredoclimitpat + namepat),
         ]
 
     def startsat(self, line):
@@ -327,7 +327,7 @@
             matched = filere.match(line)
             if matched:
                 return (matched.group('name'),
-                        b'  > %s\n' % matched.group('limit'))
+                        '  > %s\n' % matched.group('limit'))
 
     def endsat(self, ctx, line):
         return ctx[1] == line
@@ -336,7 +336,7 @@
         return line.startswith(self._prefix)
 
     def ignores(self, ctx):
-        return b'  > %s\n' % heredocignorelimit == ctx[1]
+        return '  > %s\n' % heredocignorelimit == ctx[1]
 
     def filename(self, ctx):
         return ctx[0]
@@ -357,10 +357,10 @@
     """Detect ">>> code" style embedded python code
 
     >>> matcher = pydoctestmatcher()
-    >>> startline = b'  >>> foo = 1\\n'
+    >>> startline = '  >>> foo = 1\\n'
     >>> matcher.startsat(startline)
     True
-    >>> matcher.startsat(b'  ... foo = 1\\n')
+    >>> matcher.startsat('  ... foo = 1\\n')
     False
     >>> ctx = matcher.startsat(startline)
     >>> matcher.filename(ctx)
@@ -368,45 +368,45 @@
     False
     >>> b2s(matcher.codeatstart(ctx, startline))
     'foo = 1\\n'
-    >>> inside = b'  >>> foo = 1\\n'
+    >>> inside = '  >>> foo = 1\\n'
     >>> matcher.endsat(ctx, inside)
     False
     >>> matcher.isinside(ctx, inside)
     True
     >>> b2s(matcher.codeinside(ctx, inside))
     'foo = 1\\n'
-    >>> inside = b'  ... foo = 1\\n'
+    >>> inside = '  ... foo = 1\\n'
     >>> matcher.endsat(ctx, inside)
     False
     >>> matcher.isinside(ctx, inside)
     True
     >>> b2s(matcher.codeinside(ctx, inside))
     'foo = 1\\n'
-    >>> inside = b'  expected output\\n'
+    >>> inside = '  expected output\\n'
     >>> matcher.endsat(ctx, inside)
     False
     >>> matcher.isinside(ctx, inside)
     True
     >>> b2s(matcher.codeinside(ctx, inside))
     '\\n'
-    >>> inside = b'  \\n'
+    >>> inside = '  \\n'
     >>> matcher.endsat(ctx, inside)
     False
     >>> matcher.isinside(ctx, inside)
     True
     >>> b2s(matcher.codeinside(ctx, inside))
     '\\n'
-    >>> end = b'  $ foo bar\\n'
+    >>> end = '  $ foo bar\\n'
     >>> matcher.endsat(ctx, end)
     True
     >>> matcher.codeatend(ctx, end)
-    >>> end = b'\\n'
+    >>> end = '\\n'
     >>> matcher.endsat(ctx, end)
     True
     >>> matcher.codeatend(ctx, end)
     """
-    _prefix = b'  >>> '
-    _prefixre = re.compile(br'  (>>>|\.\.\.) ')
+    _prefix = '  >>> '
+    _prefixre = re.compile(r'  (>>>|\.\.\.) ')
 
     # If a line matches against not _prefixre but _outputre, that line
     # is "an expected output line" (= not a part of code fragment).
@@ -416,10 +416,10 @@
     # run-tests.py. But "directive line inside inline python code"
     # should be rejected by Mercurial reviewers. Therefore, this
     # regexp does not match against such directive lines.
-    _outputre = re.compile(br'  $|  [^$]')
+    _outputre = re.compile(r'  $|  [^$]')
 
     def __init__(self):
-        super(pydoctestmatcher, self).__init__(b"doctest style python code")
+        super(pydoctestmatcher, self).__init__("doctest style python code")
 
     def startsat(self, line):
         # ctx is "True"
@@ -446,57 +446,57 @@
     def codeinside(self, ctx, line):
         if self._prefixre.match(line):
             return line[len(self._prefix):] # strip prefix '  >>> '/'  ... '
-        return b'\n' # an expected output line is treated as an empty line
+        return '\n' # an expected output line is treated as an empty line
 
 class pyheredocmatcher(embeddedmatcher):
     """Detect "python << LIMIT" style embedded python code
 
     >>> matcher = pyheredocmatcher()
-    >>> b2s(matcher.startsat(b'  $ python << EOF\\n'))
+    >>> b2s(matcher.startsat('  $ python << EOF\\n'))
     '  > EOF\\n'
-    >>> b2s(matcher.startsat(b'  $ $PYTHON   <<EOF\\n'))
+    >>> b2s(matcher.startsat('  $ $PYTHON   <<EOF\\n'))
     '  > EOF\\n'
-    >>> b2s(matcher.startsat(b'  $ "$PYTHON"<<  "EOF"\\n'))
+    >>> b2s(matcher.startsat('  $ "$PYTHON"<<  "EOF"\\n'))
     '  > EOF\\n'
-    >>> b2s(matcher.startsat(b"  $ $PYTHON << 'ANYLIMIT'\\n"))
+    >>> b2s(matcher.startsat("  $ $PYTHON << 'ANYLIMIT'\\n"))
     '  > ANYLIMIT\\n'
-    >>> matcher.startsat(b'  $ "$PYTHON" < EOF\\n')
-    >>> start = b'  $ python << EOF\\n'
+    >>> matcher.startsat('  $ "$PYTHON" < EOF\\n')
+    >>> start = '  $ python << EOF\\n'
     >>> ctx = matcher.startsat(start)
     >>> matcher.codeatstart(ctx, start)
     >>> matcher.filename(ctx)
     >>> matcher.ignores(ctx)
     False
-    >>> inside = b'  > foo = 1\\n'
+    >>> inside = '  > foo = 1\\n'
     >>> matcher.endsat(ctx, inside)
     False
     >>> matcher.isinside(ctx, inside)
     True
     >>> b2s(matcher.codeinside(ctx, inside))
     'foo = 1\\n'
-    >>> end = b'  > EOF\\n'
+    >>> end = '  > EOF\\n'
     >>> matcher.endsat(ctx, end)
     True
     >>> matcher.codeatend(ctx, end)
-    >>> matcher.endsat(ctx, b'  > EOFEOF\\n')
+    >>> matcher.endsat(ctx, '  > EOFEOF\\n')
     False
-    >>> ctx = matcher.startsat(b'  $ python << NO_CHECK_EOF\\n')
+    >>> ctx = matcher.startsat('  $ python << NO_CHECK_EOF\\n')
     >>> matcher.ignores(ctx)
     True
     """
-    _prefix = b'  > '
+    _prefix = '  > '
 
-    _startre = re.compile(br'  \$ (\$PYTHON|"\$PYTHON"|python).*' +
+    _startre = re.compile(r'  \$ (\$PYTHON|"\$PYTHON"|python).*' +
                           heredoclimitpat)
 
     def __init__(self):
-        super(pyheredocmatcher, self).__init__(b"heredoc python invocation")
+        super(pyheredocmatcher, self).__init__("heredoc python invocation")
 
     def startsat(self, line):
         # ctx is END-LINE-OF-EMBEDDED-CODE
         matched = self._startre.match(line)
         if matched:
-            return b'  > %s\n' % matched.group('limit')
+            return '  > %s\n' % matched.group('limit')
 
     def endsat(self, ctx, line):
         return ctx == line
@@ -505,7 +505,7 @@
         return line.startswith(self._prefix)
 
     def ignores(self, ctx):
-        return b'  > %s\n' % heredocignorelimit == ctx
+        return '  > %s\n' % heredocignorelimit == ctx
 
     def filename(self, ctx):
         return None # no filename
@@ -524,7 +524,7 @@
     pyheredocmatcher(),
     # use '[^<]+' instead of '\S+', in order to match against
     # paths including whitespaces
-    fileheredocmatcher(b'heredoc .py file', br'[^<]+\.py'),
+    fileheredocmatcher('heredoc .py file', r'[^<]+\.py'),
 ]
 
 def pyembedded(basefile, lines, errors):
@@ -536,7 +536,7 @@
 _shmatchers = [
     # use '[^<]+' instead of '\S+', in order to match against
     # paths including whitespaces
-    fileheredocmatcher(b'heredoc .sh file', br'[^<]+\.sh'),
+    fileheredocmatcher('heredoc .sh file', r'[^<]+\.sh'),
 ]
 
 def shembedded(basefile, lines, errors):
@@ -548,8 +548,8 @@
 _hgrcmatchers = [
     # use '[^<]+' instead of '\S+', in order to match against
     # paths including whitespaces
-    fileheredocmatcher(b'heredoc hgrc file',
-                       br'(([^/<]+/)+hgrc|\$HGRCPATH|\${HGRCPATH})'),
+    fileheredocmatcher('heredoc hgrc file',
+                       r'(([^/<]+/)+hgrc|\$HGRCPATH|\${HGRCPATH})'),
 ]
 
 def hgrcembedded(basefile, lines, errors):
@@ -565,14 +565,14 @@
         errors = []
         for name, starts, ends, code in embeddedfunc(basefile, lines, errors):
             if not name:
-                name = b'<anonymous>'
-            writeout(b"%s:%d: %s starts\n" % (basefile, starts, name))
+                name = '<anonymous>'
+            writeout("%s:%d: %s starts\n" % (basefile, starts, name))
             if opts.verbose and code:
-                writeout(b"  |%s\n" %
-                         b"\n  |".join(l for l in code.splitlines()))
-            writeout(b"%s:%d: %s ends\n" % (basefile, ends, name))
+                writeout("  |%s\n" %
+                         "\n  |".join(l for l in code.splitlines()))
+            writeout("%s:%d: %s ends\n" % (basefile, ends, name))
         for e in errors:
-            writeerr(b"%s\n" % e)
+            writeerr("%s\n" % e)
         return len(errors)
 
     def applyembedded(args, embeddedfunc, opts):
@@ -580,11 +580,11 @@
         if args:
             for f in args:
                 with opentext(f) as fp:
-                    if showembedded(bytestr(f), fp, embeddedfunc, opts):
+                    if showembedded(f, fp, embeddedfunc, opts):
                         ret = 1
         else:
             lines = [l for l in stdin.readlines()]
-            if showembedded(b'<stdin>', lines, embeddedfunc, opts):
+            if showembedded('<stdin>', lines, embeddedfunc, opts):
                 ret = 1
         return ret
 
--- a/doc/gendoc.py	Mon May 20 10:08:28 2019 +0200
+++ b/doc/gendoc.py	Mon May 20 11:40:47 2019 -0400
@@ -120,7 +120,7 @@
 
     # print cmds
     ui.write(minirst.section(_(b"Commands")))
-    commandprinter(ui, table, minirst.subsection)
+    commandprinter(ui, table, minirst.subsection, minirst.subsubsection)
 
     # print help topics
     # The config help topic is included in the hgrc.5 man page.
@@ -143,7 +143,8 @@
         cmdtable = getattr(mod, 'cmdtable', None)
         if cmdtable:
             ui.write(minirst.subsubsection(_(b'Commands')))
-            commandprinter(ui, cmdtable, minirst.subsubsubsection)
+            commandprinter(ui, cmdtable, minirst.subsubsubsection,
+                    minirst.subsubsubsubsection)
 
 def showtopic(ui, topic):
     extrahelptable = [
@@ -177,7 +178,27 @@
         ui.write(doc)
         ui.write(b"\n")
 
-def commandprinter(ui, cmdtable, sectionfunc):
+def commandprinter(ui, cmdtable, sectionfunc, subsectionfunc):
+    """Render restructuredtext describing a list of commands and their
+    documentation, grouped by command category.
+
+    Args:
+      ui: UI object to write the output to
+      cmdtable: a dict that maps a string of the command name plus its aliases
+        (separated with pipes) to a 3-tuple of (the command's function, a list
+        of its option descriptions, and a string summarizing available
+        options). Example, with aliases added for demonstration purposes:
+
+          'phase|alias1|alias2': (
+             <function phase at 0x7f0816b05e60>,
+             [ ('p', 'public', False, 'set changeset phase to public'),
+               ...,
+               ('r', 'rev', [], 'target revision', 'REV')],
+             '[-p|-d|-s] [-f] [-r] [REV...]'
+          )
+      sectionfunc: minirst function to format command category headers
+      subsectionfunc: minirst function to format command headers
+    """
     h = {}
     for c, attr in cmdtable.items():
         f = c.split(b"|")[0]
@@ -185,45 +206,76 @@
         h[f] = c
     cmds = h.keys()
 
-    for f in sorted(cmds):
-        if f.startswith(b"debug"):
+    def helpcategory(cmd):
+        """Given a canonical command name from `cmds` (above), retrieve its
+        help category. If helpcategory is None, default to CATEGORY_NONE.
+        """
+        fullname = h[cmd]
+        details = cmdtable[fullname]
+        helpcategory = details[0].helpcategory
+        return helpcategory or help.registrar.command.CATEGORY_NONE
+
+    cmdsbycategory = {category: [] for category in help.CATEGORY_ORDER}
+    for cmd in cmds:
+        # If a command category wasn't registered, the command won't get
+        # rendered below, so we raise an AssertionError.
+        if helpcategory(cmd) not in cmdsbycategory:
+            raise AssertionError(
+                "The following command did not register its (category) in "
+                "help.CATEGORY_ORDER: %s (%s)" % (cmd, helpcategory(cmd)))
+        cmdsbycategory[helpcategory(cmd)].append(cmd)
+
+    # Print the help for each command. We present the commands grouped by
+    # category, and we use help.CATEGORY_ORDER as a guide for a helpful order
+    # in which to present the categories.
+    for category in help.CATEGORY_ORDER:
+        categorycmds = cmdsbycategory[category]
+        if not categorycmds:
+            # Skip empty categories
             continue
-        d = get_cmd(h[f], cmdtable)
-        ui.write(sectionfunc(d[b'cmd']))
-        # short description
-        ui.write(d[b'desc'][0])
-        # synopsis
-        ui.write(b"::\n\n")
-        synopsislines = d[b'synopsis'].splitlines()
-        for line in synopsislines:
-            # some commands (such as rebase) have a multi-line
+        # Print a section header for the category.
+        # For now, the category header is at the same level as the headers for
+        # the commands in the category; this is fixed in the next commit.
+        ui.write(sectionfunc(help.CATEGORY_NAMES[category]))
+        # Print each command in the category
+        for f in sorted(categorycmds):
+            if f.startswith(b"debug"):
+                continue
+            d = get_cmd(h[f], cmdtable)
+            ui.write(subsectionfunc(d[b'cmd']))
+            # short description
+            ui.write(d[b'desc'][0])
             # synopsis
-            ui.write(b"   %s\n" % line)
-        ui.write(b'\n')
-        # description
-        ui.write(b"%s\n\n" % d[b'desc'][1])
-        # options
-        opt_output = list(d[b'opts'])
-        if opt_output:
-            opts_len = max([len(line[0]) for line in opt_output])
-            ui.write(_(b"Options:\n\n"))
-            multioccur = False
-            for optstr, desc in opt_output:
-                if desc:
-                    s = b"%-*s  %s" % (opts_len, optstr, desc)
-                else:
-                    s = optstr
-                ui.write(b"%s\n" % s)
-                if optstr.endswith(b"[+]>"):
-                    multioccur = True
-            if multioccur:
-                ui.write(_(b"\n[+] marked option can be specified"
-                           b" multiple times\n"))
-            ui.write(b"\n")
-        # aliases
-        if d[b'aliases']:
-            ui.write(_(b"    aliases: %s\n\n") % b" ".join(d[b'aliases']))
-
+            ui.write(b"::\n\n")
+            synopsislines = d[b'synopsis'].splitlines()
+            for line in synopsislines:
+                # some commands (such as rebase) have a multi-line
+                # synopsis
+                ui.write(b"   %s\n" % line)
+            ui.write(b'\n')
+            # description
+            ui.write(b"%s\n\n" % d[b'desc'][1])
+            # options
+            opt_output = list(d[b'opts'])
+            if opt_output:
+                opts_len = max([len(line[0]) for line in opt_output])
+                ui.write(_(b"Options:\n\n"))
+                multioccur = False
+                for optstr, desc in opt_output:
+                    if desc:
+                        s = b"%-*s  %s" % (opts_len, optstr, desc)
+                    else:
+                        s = optstr
+                    ui.write(b"%s\n" % s)
+                    if optstr.endswith(b"[+]>"):
+                        multioccur = True
+                if multioccur:
+                    ui.write(_(b"\n[+] marked option can be specified"
+                               b" multiple times\n"))
+                ui.write(b"\n")
+            # aliases
+            if d[b'aliases']:
+                ui.write(_(b"    aliases: %s\n\n") % b" ".join(d[b'aliases']))
 
 def allextensionnames():
     return set(extensions.enabled().keys()) | set(extensions.disabled().keys())
--- a/hgext/absorb.py	Mon May 20 10:08:28 2019 +0200
+++ b/hgext/absorb.py	Mon May 20 11:40:47 2019 -0400
@@ -914,7 +914,10 @@
     """
     if stack is None:
         limit = ui.configint('absorb', 'max-stack-size')
-        stack = getdraftstack(repo['.'], limit)
+        headctx = repo['.']
+        if len(headctx.parents()) > 1:
+            raise error.Abort(_('cannot absorb into a merge'))
+        stack = getdraftstack(headctx, limit)
         if limit and len(stack) >= limit:
             ui.warn(_('absorb: only the recent %d changesets will '
                       'be analysed\n')
--- a/hgext/convert/monotone.py	Mon May 20 10:08:28 2019 +0200
+++ b/hgext/convert/monotone.py	Mon May 20 11:40:47 2019 -0400
@@ -284,9 +284,9 @@
                     # d2 => d3
                     ignoremove[tofile] = 1
             for tofile, fromfile in renamed.items():
-                self.ui.debug (_("copying file in renamed directory "
-                                 "from '%s' to '%s'")
-                               % (fromfile, tofile), '\n')
+                self.ui.debug(
+                    "copying file in renamed directory from '%s' to '%s'"
+                    % (fromfile, tofile), '\n')
                 files[tofile] = rev
                 copies[tofile] = fromfile
             for fromfile in renamed.values():
@@ -370,4 +370,3 @@
             self.mtnwritefp = None
             self.mtnreadfp.close()
             self.mtnreadfp = None
-
--- a/hgext/fix.py	Mon May 20 10:08:28 2019 +0200
+++ b/hgext/fix.py	Mon May 20 11:40:47 2019 -0400
@@ -72,12 +72,43 @@
 To account for changes made by each tool, the line numbers used for incremental
 formatting are recomputed before executing the next tool. So, each tool may see
 different values for the arguments added by the :linerange suboption.
+
+Each fixer tool is allowed to return some metadata in addition to the fixed file
+content. The metadata must be placed before the file content on stdout,
+separated from the file content by a zero byte. The metadata is parsed as a JSON
+value (so, it should be UTF-8 encoded and contain no zero bytes). A fixer tool
+is expected to produce this metadata encoding if and only if the :metadata
+suboption is true::
+
+  [fix]
+  tool:command = tool --prepend-json-metadata
+  tool:metadata = true
+
+The metadata values are passed to hooks, which can be used to print summaries or
+perform other post-fixing work. The supported hooks are::
+
+  "postfixfile"
+    Run once for each file in each revision where any fixer tools made changes
+    to the file content. Provides "$HG_REV" and "$HG_PATH" to identify the file,
+    and "$HG_METADATA" with a map of fixer names to metadata values from fixer
+    tools that affected the file. Fixer tools that didn't affect the file have a
+    value of None. Only fixer tools that executed are present in the metadata.
+
+  "postfix"
+    Run once after all files and revisions have been handled. Provides
+    "$HG_REPLACEMENTS" with information about what revisions were created and
+    made obsolete. Provides a boolean "$HG_WDIRWRITTEN" to indicate whether any
+    files in the working copy were updated. Provides a list "$HG_METADATA"
+    mapping fixer tool names to lists of metadata values returned from
+    executions that modified a file. This aggregates the same metadata
+    previously passed to the "postfixfile" hook.
 """
 
 from __future__ import absolute_import
 
 import collections
 import itertools
+import json
 import os
 import re
 import subprocess
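As a rough illustration of the metadata protocol described in the new fix.py docstring above (a JSON value, a single zero byte, then the fixed file content on stdout), here is a hedged, standalone sketch of a fixer tool; the script, its behaviour, and the ``changedlines`` metadata field are hypothetical and not part of this changeset::

  #!/usr/bin/env python
  # Hypothetical fixer tool illustrating the metadata protocol: strip
  # trailing whitespace and report how many lines changed.
  from __future__ import absolute_import
  import json
  import sys

  def main():
      stdin = getattr(sys.stdin, 'buffer', sys.stdin)
      stdout = getattr(sys.stdout, 'buffer', sys.stdout)
      original = stdin.read().split(b'\n')
      fixed = [line.rstrip() for line in original]
      changed = sum(1 for a, b in zip(original, fixed) if a != b)
      # Metadata first (UTF-8 JSON, no zero bytes), then a single NUL,
      # then the fixed file content, which is the layout fix.py expects
      # when the tool:metadata suboption is true.
      stdout.write(json.dumps({'changedlines': changed}).encode('utf-8'))
      stdout.write(b'\x00')
      stdout.write(b'\n'.join(fixed))

  if __name__ == '__main__':
      main()

With ``tool:command`` pointing at a script like this and ``tool:metadata = true``, the parsed value (here ``{'changedlines': N}``) would be what the hooks receive for each affected file.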
@@ -117,13 +148,14 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-# Register the suboptions allowed for each configured fixer.
+# Register the suboptions allowed for each configured fixer, and default values.
 FIXER_ATTRS = {
     'command': None,
     'linerange': None,
     'fileset': None,
     'pattern': None,
     'priority': 0,
+    'metadata': False,
 }
 
 for key, default in FIXER_ATTRS.items():
@@ -201,10 +233,12 @@
             for rev, path in items:
                 ctx = repo[rev]
                 olddata = ctx[path].data()
-                newdata = fixfile(ui, opts, fixers, ctx, path, basectxs[rev])
+                metadata, newdata = fixfile(ui, opts, fixers, ctx, path,
+                                            basectxs[rev])
                 # Don't waste memory/time passing unchanged content back, but
                 # produce one result per item either way.
-                yield (rev, path, newdata if newdata != olddata else None)
+                yield (rev, path, metadata,
+                       newdata if newdata != olddata else None)
         results = worker.worker(ui, 1.0, getfixes, tuple(), workqueue,
                                 threadsafe=False)
 
@@ -215,15 +249,25 @@
         # the tests deterministic. It might also be considered a feature since
         # it makes the results more easily reproducible.
         filedata = collections.defaultdict(dict)
+        aggregatemetadata = collections.defaultdict(list)
         replacements = {}
         wdirwritten = False
         commitorder = sorted(revstofix, reverse=True)
         with ui.makeprogress(topic=_('fixing'), unit=_('files'),
                              total=sum(numitems.values())) as progress:
-            for rev, path, newdata in results:
+            for rev, path, filerevmetadata, newdata in results:
                 progress.increment(item=path)
+                for fixername, fixermetadata in filerevmetadata.items():
+                    aggregatemetadata[fixername].append(fixermetadata)
                 if newdata is not None:
                     filedata[rev][path] = newdata
+                    hookargs = {
+                      'rev': rev,
+                      'path': path,
+                      'metadata': filerevmetadata,
+                    }
+                    repo.hook('postfixfile', throw=False,
+                              **pycompat.strkwargs(hookargs))
                 numitems[rev] -= 1
                 # Apply the fixes for this and any other revisions that are
                 # ready and sitting at the front of the queue. Using a loop here
@@ -240,6 +284,12 @@
                     del filedata[rev]
 
         cleanup(repo, replacements, wdirwritten)
+        hookargs = {
+            'replacements': replacements,
+            'wdirwritten': wdirwritten,
+            'metadata': aggregatemetadata,
+        }
+        repo.hook('postfix', throw=True, **pycompat.strkwargs(hookargs))
 
 def cleanup(repo, replacements, wdirwritten):
     """Calls scmutil.cleanupnodes() with the given replacements.
@@ -491,6 +541,7 @@
     A fixer tool's stdout will become the file's new content if and only if it
     exits with code zero.
     """
+    metadata = {}
     newdata = fixctx[path].data()
     for fixername, fixer in fixers.iteritems():
         if fixer.affects(opts, fixctx, path):
@@ -506,9 +557,20 @@
                 stdin=subprocess.PIPE,
                 stdout=subprocess.PIPE,
                 stderr=subprocess.PIPE)
-            newerdata, stderr = proc.communicate(newdata)
+            stdout, stderr = proc.communicate(newdata)
             if stderr:
                 showstderr(ui, fixctx.rev(), fixername, stderr)
+            newerdata = stdout
+            if fixer.shouldoutputmetadata():
+                try:
+                    metadatajson, newerdata = stdout.split('\0', 1)
+                    metadata[fixername] = json.loads(metadatajson)
+                except ValueError:
+                    ui.warn(_('ignored invalid output from fixer tool: %s\n') %
+                            (fixername,))
+                    continue
+            else:
+                metadata[fixername] = None
             if proc.returncode == 0:
                 newdata = newerdata
             else:
@@ -519,7 +581,7 @@
                     ui, _('no fixes will be applied'),
                     hint=_('use --config fix.failure=continue to apply any '
                            'successful fixes anyway'))
-    return newdata
+    return metadata, newdata
 
 def showstderr(ui, rev, fixername, stderr):
     """Writes the lines of the stderr string as warnings on the ui
@@ -667,6 +729,10 @@
         """Should this fixer run on the file at the given path and context?"""
         return scmutil.match(fixctx, [self._pattern], opts)(path)
 
+    def shouldoutputmetadata(self):
+        """Should the stdout of this fixer start with JSON and a null byte?"""
+        return self._metadata
+
     def command(self, ui, path, rangesfn):
         """A shell command to use to invoke this fixer on the given file/lines
 
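
The fix.py hunks above define a small protocol for fixer tools that opt in via
the new ``:metadata`` suboption: print a JSON object, then a zero byte, then the
fixed file content on stdout. A minimal sketch of such a tool follows; the tool
name and the metadata payload are illustrative only and are not part of this
changeset::

  #!/usr/bin/env python3
  # hypothetical fixer tool: reports a line count as metadata and leaves
  # the file content untouched
  import json
  import sys

  data = sys.stdin.buffer.read()
  metadata = json.dumps({'linecount': data.count(b'\n')})
  sys.stdout.buffer.write(metadata.encode('utf-8'))
  sys.stdout.buffer.write(b'\0')
  sys.stdout.buffer.write(data)

With ``tool:metadata = true`` configured as shown in the docstring, the parsed
JSON object is what the new ``postfixfile`` and ``postfix`` hooks receive in
``$HG_METADATA`` for this tool.
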
--- a/hgext/gpg.py	Mon May 20 10:08:28 2019 +0200
+++ b/hgext/gpg.py	Mon May 20 11:40:47 2019 -0400
@@ -49,6 +49,11 @@
 
 # Custom help category
 _HELP_CATEGORY = 'gpg'
+help.CATEGORY_ORDER.insert(
+    help.CATEGORY_ORDER.index(registrar.command.CATEGORY_HELP),
+    _HELP_CATEGORY
+)
+help.CATEGORY_NAMES[_HELP_CATEGORY] = 'Signing changes (GPG)'
 
 class gpg(object):
     def __init__(self, path, key=None):
--- a/hgext/histedit.py	Mon May 20 10:08:28 2019 +0200
+++ b/hgext/histedit.py	Mon May 20 11:40:47 2019 -0400
@@ -1079,6 +1079,8 @@
 def changemode(state, mode):
     curmode, _ = state['mode']
     state['mode'] = (mode, curmode)
+    if mode == MODE_PATCH:
+        state['modes'][MODE_PATCH]['patchcontents'] = patchcontents(state)
 
 def makeselection(state, pos):
     state['selected'] = pos
@@ -1134,7 +1136,7 @@
     if mode != MODE_PATCH:
         return
     mode_state = state['modes'][mode]
-    num_lines = len(patchcontents(state))
+    num_lines = len(mode_state['patchcontents'])
     page_height = state['page_height']
     unit = page_height if unit == 'page' else 1
     num_pages = 1 + (num_lines - 1) / page_height
@@ -1227,15 +1229,25 @@
     else:
         win.addstr(y, x, line)
 
+def _trunc_head(line, n):
+    if len(line) <= n:
+        return line
+    return '> ' + line[-(n - 2):]
+def _trunc_tail(line, n):
+    if len(line) <= n:
+        return line
+    return line[:n - 2] + ' >'
+
 def patchcontents(state):
     repo = state['repo']
     rule = state['rules'][state['pos']]
-    repo.ui.verbose = True
     displayer = logcmdutil.changesetdisplayer(repo.ui, repo, {
         "patch": True,  "template": "status"
     }, buffered=True)
-    displayer.show(rule.ctx)
-    displayer.close()
+    overrides = {('ui',  'verbose'): True}
+    with repo.ui.configoverride(overrides, source='histedit'):
+        displayer.show(rule.ctx)
+        displayer.close()
     return displayer.hunk[rule.ctx.rev()].splitlines()
 
 def _chisteditmain(repo, rules, stdscr):
@@ -1283,11 +1295,23 @@
         line = "bookmark:  {0}".format(' '.join(bms))
         win.addstr(3, 1, line[:length])
 
-        line = "files:     {0}".format(','.join(ctx.files()))
+        line = "summary:   {0}".format(ctx.description().splitlines()[0])
         win.addstr(4, 1, line[:length])
 
-        line = "summary:   {0}".format(ctx.description().splitlines()[0])
-        win.addstr(5, 1, line[:length])
+        line = "files:     "
+        win.addstr(5, 1, line)
+        fnx = 1 + len(line)
+        fnmaxx = length - fnx + 1
+        y = 5
+        fnmaxn = maxy - (1 + y) - 1
+        files = ctx.files()
+        for i, line1 in enumerate(files):
+            if len(files) > fnmaxn and i == fnmaxn - 1:
+                win.addstr(y, fnx, _trunc_tail(','.join(files[i:]), fnmaxx))
+                y = y + 1
+                break
+            win.addstr(y, fnx, _trunc_head(line1, fnmaxx))
+            y = y + 1
 
         conflicts = rule.conflicts
         if len(conflicts) > 0:
@@ -1296,7 +1320,7 @@
         else:
             conflictstr = 'no overlap'
 
-        win.addstr(6, 1, conflictstr[:length])
+        win.addstr(y, 1, conflictstr[:length])
         win.noutrefresh()
 
     def helplines(mode):
@@ -1372,15 +1396,16 @@
 
     def renderpatch(win, state):
         start = state['modes'][MODE_PATCH]['line_offset']
-        renderstring(win, state, patchcontents(state)[start:], diffcolors=True)
+        content = state['modes'][MODE_PATCH]['patchcontents']
+        renderstring(win, state, content[start:], diffcolors=True)
 
     def layout(mode):
         maxy, maxx = stdscr.getmaxyx()
         helplen = len(helplines(mode))
         return {
-            'commit': (8, maxx),
+            'commit': (12, maxx),
             'help': (helplen, maxx),
-            'main': (maxy - helplen - 8, maxx),
+            'main': (maxy - helplen - 12, maxx),
         }
 
     def drawvertwin(size, y, x):
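
For the truncation helpers added in the histedit hunk above, a quick
illustration with made-up inputs (only the ``'> '``/``' >'`` markers and the
length cap come from the code; the width is arbitrary)::

  >>> _trunc_tail('very/long/file/name.py', 12)
  'very/long/ >'
  >>> _trunc_head('very/long/file/name.py', 12)
  '> le/name.py'

Lines no longer than the requested width are returned unchanged.
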
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/lfs/TODO.rst	Mon May 20 11:40:47 2019 -0400
@@ -0,0 +1,195 @@
+Prior to removing (EXPERIMENTAL)
+--------------------------------
+
+These things affect UI and/or behavior, and should probably be implemented (or
+ruled out) prior to taking off the experimental shrinkwrap.
+
+#. Finish the `hg convert` story
+
+   * Add an argument to accept a rules file to apply during conversion?
+     Currently `lfs.track` is the only way to affect the conversion.
+   * drop `lfs.track` config settings
+   * splice in `.hglfs` file for normal repo -> lfs conversions?
+
+#. Stop uploading blobs when pushing between local repos
+
+   * Could probably hardlink directly to the other local repo's store
+   * Support inferring `lfs.url` for local push/pull (currently only supports
+     http)
+
+#. Stop uploading blobs on strip/amend/histedit/etc.
+
+   * This seems to be a side effect of doing it for `hg bundle`, which probably
+     makes sense.
+
+#. Handle a server with the extension loaded and a client without the extension
+   more gracefully.
+
+   * `changegroup3` is still experimental, and not enabled by default.
+   * Figure out how to `introduce LFS to the server repo
+     <https://www.mercurial-scm.org/pipermail/mercurial-devel/2018-September/122281.html>`_.
+     See the TODO in test-lfs-serve.t.
+
+#. Remove `lfs.retry` hack in client?  This came from FB, but it's not clear why
+   it is/was needed.
+
+#. `hg export` currently writes out the LFS blob.  Should it write the pointer
+   instead?
+
+   * `hg diff` is similar, and probably shouldn't see the pointer file
+
+#. `Fix https multiplexing, and re-enable workers
+   <https://www.mercurial-scm.org/pipermail/mercurial-devel/2018-January/109916.html>`_.
+
+#. Show to-be-applied rules with `hg files -r 'wdir()' 'set:lfs()'`
+
+   * `debugignore` can show file + line number, so a dedicated command could be
+     useful too.
+
+#. Filesets, revsets and templates
+
+   * A dedicated revset should be faster than `'file(set:lfs())'`
+   * Attach `{lfsoid}` and `{lfspointer}` to `general keywords
+     <https://www.mercurial-scm.org/pipermail/mercurial-devel/2018-January/110251.html>`_,
+     IFF the file is a blob
+   * Drop existing items that would be redundant with general support
+
+#. Can `grep` avoid downloading most things?
+
+   * Add a command option to skip LFS blobs?
+
+#. Add a flag that's visible in `hg files -v` to indicate external storage?
+
+#. Server side issues
+
+   * Check for local disk space before allowing upload.  (I've got a patch for
+     this.)
+   * Make sure the http codes used are appropriate.
+   * `Why is copying the Authorization header into the JSON payload necessary
+     <https://www.mercurial-scm.org/pipermail/mercurial-devel/2018-April/116230.html>`_?
+   * `LFS-Authenticate` header support in client and server(?)
+
+#. Add locks on cache and blob store
+
+   * This is complicated with a global store, and multiple potentially unrelated
+     local repositories that reference the same blob.
+   * Alternately, maybe just handle collisions when trying to create the same
+     blob in the store somehow.
+
+#. Are proper file sizes reported in `debugupgraderepo`?
+
+#. Finish prefetching files
+
+   * `-T {rawdata}`
+   * `verify`
+   * `grep`
+
+#. Output cleanup
+
+   * Can we print the url when connecting to the blobstore?  (A sudden
+     connection refused after pulling commits looks confusing.)  Problem is,
+     'pushing to main url' is printed, and then lfs wants to upload before going
+     back to the main repo transfer, so then *that* could be confusing with
+     extra output. (This is kinda improved with 380f5131ee7b and 9f78d10742af.)
+
+   * Add more progress indicators?  Uploading a large repo looks idle for a long
+     time while it scans for blobs in each outgoing revision.
+
+   * Print filenames instead of hashes in error messages
+
+     * subrepo aware paths, where necessary
+
+   * Is existing output at the right status/note/debug level?
+
+#. Can `verify` be done without downloading everything?
+
+   * If we know that we are talking to an hg server, we can leverage the fact
+     that it validates in the Batch API portion, and skip d/l altogether.  OTOH,
+     maybe we should download the files unconditionally for forensics.  The
+     alternative is to define a custom transfer handler that definitively
+     verifies without transferring, and then cache those results.  When verify
+     comes looking, look in the cache instead of actually opening the file and
+     processing it.
+
+   * Yuya has concerns about when blob fetch takes place vs when revlog is
+     verified.  Since the visible hash matches the blob content, I don't think
+     there's a way to verify the pointer file that's actually stored in the
+     filelog (other than basic JSON checks).  Full verification requires the
+     blob.  See
+     https://www.mercurial-scm.org/pipermail/mercurial-devel/2018-April/116133.html
+
+   * Opening a corrupt pointer file aborts.  It probably shouldn't for verify.
+
+
+Future ideas/features/polishing
+-------------------------------
+
+These aren't in any particular order, and are things that don't have obvious BC
+concerns.
+
+#. Garbage collection `(issue5790) <https://bz.mercurial-scm.org/show_bug.cgi?id=5790>`_
+
+   * This gets complicated because of the global cache, which may or may not
+     consist of hardlinks to the repo, and may be in use by other repos.  (So
+     the gc may be pointless.)
+
+#. `Compress blobs <https://github.com/git-lfs/git-lfs/issues/260>`_
+
+   * 700MB repo becomes 2.5GB with all lfs blobs
+   * What implications are there for filesystem paths that don't indicate
+     compression?  (i.e. how to share with global cache and other local repos?)
+   * Probably needs to be stored under `.hg/store/lfs/zstd`, with a repo
+     requirement.
+   * Allow tuneable compression type and settings?
+   * Support compression over the wire if both sides understand the compression?
+   * `debugupgraderepo` to convert?
+   * Probably not worth supporting compressed and uncompressed concurrently
+
+#. Determine things to upload with `readfast()
+   <https://www.mercurial-scm.org/pipermail/mercurial-devel/2018-August/121315.html>`_
+
+   * Significantly faster when pushing an entire large repo to http.
+   * Causes test changes to fileset and templates; may need both this and
+     current methods of lookup.
+
+#. Is a command to download everything needed?  This would allow copying the
+   whole repository to a portable drive.  Currently this can be effected by
+   running `hg verify`.
+
+#. Stop reading in entire file into one buffer when passing through filelog
+   interface
+
+   * `Requires major replumbing to core
+     <https://www.mercurial-scm.org/wiki/HandlingLargeFiles>`_
+
+#. Keep corrupt files around in 'store/lfs/incoming' for forensics?
+
+   * Files should be downloaded to 'incoming', and moved to normal location when
+     done.
+
+#. Client side path enhancements
+
+   * Support paths.default:lfs = ... style paths
+   * SSH -> https server inference
+
+     * https://www.mercurial-scm.org/pipermail/mercurial-devel/2018-April/115416.html
+     * https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md#guessing-the-server
+
+#. Server enhancements
+
+   * Add support for transfer quotas?
+   * Download should be able to send the file in chunks, without reading the
+     whole thing into memory
+     (https://www.mercurial-scm.org/pipermail/mercurial-devel/2018-March/114584.html)
+   * Support for resuming transfers
+
+#. Handle 3rd party server storage.
+
+   * Teach client to handle lfs `verify` action.  This is needed after the
+     server instructs the client to upload the file to another server, in order
+     to tell the server that the upload completed.
+   * Teach the server to send redirects if configured, and process `verify`
+     requests.
+
+#. `Is any hg-git work needed
+   <https://groups.google.com/d/msg/hg-git/XYNQuudteeM/ivt8gXoZAAAJ>`_?
--- a/hgext/narrow/narrowcommands.py	Mon May 20 10:08:28 2019 +0200
+++ b/hgext/narrow/narrowcommands.py	Mon May 20 11:40:47 2019 -0400
@@ -216,7 +216,7 @@
                     todelete.append(f)
             elif f.startswith('meta/'):
                 dir = f[5:-13]
-                dirs = ['.'] + sorted(util.dirs({dir})) + [dir]
+                dirs = sorted(util.dirs({dir})) + [dir]
                 include = True
                 for d in dirs:
                     visit = newmatch.visitdir(d)
--- a/hgext/phabricator.py	Mon May 20 10:08:28 2019 +0200
+++ b/hgext/phabricator.py	Mon May 20 11:40:47 2019 -0400
@@ -65,6 +65,7 @@
     scmutil,
     smartset,
     tags,
+    templatefilters,
     templateutil,
     url as urlmod,
     util,
@@ -126,6 +127,13 @@
 
 def vcrcommand(name, flags, spec, helpcategory=None):
     fullflags = flags + _VCR_FLAGS
+    def hgmatcher(r1, r2):
+        if r1.uri != r2.uri or r1.method != r2.method:
+            return False
+        r1params = r1.body.split(b'&')
+        r2params = r2.body.split(b'&')
+        return set(r1params) == set(r2params)
+
     def decorate(fn):
         def inner(*args, **kwargs):
             cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
@@ -142,7 +150,8 @@
                             (urlmod, r'httpsconnection',
                              stubs.VCRHTTPSConnection),
                         ])
-                    with vcr.use_cassette(cassette):
+                    vcr.register_matcher(r'hgmatcher', hgmatcher)
+                    with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
                         return fn(*args, **kwargs)
             return fn(*args, **kwargs)
         inner.__name__ = fn.__name__
@@ -380,11 +389,12 @@
     params = {
         b'diff_id': diff[b'id'],
         b'name': b'hg:meta',
-        b'data': json.dumps({
-            u'user': encoding.unifromlocal(ctx.user()),
-            u'date': u'{:.0f} {}'.format(*ctx.date()),
-            u'node': encoding.unifromlocal(ctx.hex()),
-            u'parent': encoding.unifromlocal(ctx.p1().hex()),
+        b'data': templatefilters.json({
+            b'user': ctx.user(),
+            b'date': b'%d %d' % ctx.date(),
+            b'branch': ctx.branch(),
+            b'node': ctx.hex(),
+            b'parent': ctx.p1().hex(),
         }),
     }
     callconduit(ctx.repo(), b'differential.setdiffproperty', params)
@@ -392,12 +402,14 @@
     params = {
         b'diff_id': diff[b'id'],
         b'name': b'local:commits',
-        b'data': json.dumps({
-            encoding.unifromlocal(ctx.hex()): {
-                u'author': encoding.unifromlocal(stringutil.person(ctx.user())),
-                u'authorEmail': encoding.unifromlocal(
-                    stringutil.email(ctx.user())),
-                u'time': u'{:.0f}'.format(ctx.date()[0]),
+        b'data': templatefilters.json({
+            ctx.hex(): {
+                b'author': stringutil.person(ctx.user()),
+                b'authorEmail': stringutil.email(ctx.user()),
+                b'time': int(ctx.date()[0]),
+                b'commit': ctx.hex(),
+                b'parents': [ctx.p1().hex()],
+                b'branch': ctx.branch(),
             },
         }),
     }
@@ -632,7 +644,8 @@
 # Map from "hg:meta" keys to header understood by "hg import". The order is
 # consistent with "hg export" output.
 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
-                              (b'node', b'Node ID'), (b'parent', b'Parent ')])
+                              (b'branch', b'Branch'), (b'node', b'Node ID'),
+                              (b'parent', b'Parent ')])
 
 def _confirmbeforesend(repo, revs, oldmap):
     url, token = readurltoken(repo)
@@ -901,16 +914,31 @@
     """
     props = diff.get(b'properties') or {}
     meta = props.get(b'hg:meta')
-    if not meta and props.get(b'local:commits'):
-        commit = sorted(props[b'local:commits'].values())[0]
-        meta = {
-            b'date': b'%d 0' % commit[b'time'],
-            b'node': commit[b'rev'],
-            b'user': b'%s <%s>' % (commit[b'author'], commit[b'authorEmail']),
-        }
-        if len(commit.get(b'parents', ())) >= 1:
-            meta[b'parent'] = commit[b'parents'][0]
-    return meta or {}
+    if not meta:
+        if props.get(b'local:commits'):
+            commit = sorted(props[b'local:commits'].values())[0]
+            meta = {}
+            if b'author' in commit and b'authorEmail' in commit:
+                meta[b'user'] = b'%s <%s>' % (commit[b'author'],
+                                              commit[b'authorEmail'])
+            if b'time' in commit:
+                meta[b'date'] = b'%d 0' % commit[b'time']
+            if b'branch' in commit:
+                meta[b'branch'] = commit[b'branch']
+            node = commit.get(b'commit', commit.get(b'rev'))
+            if node:
+                meta[b'node'] = node
+            if len(commit.get(b'parents', ())) >= 1:
+                meta[b'parent'] = commit[b'parents'][0]
+        else:
+            meta = {}
+    if b'date' not in meta and b'dateCreated' in diff:
+        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
+    if b'branch' not in meta and diff.get(b'branch'):
+        meta[b'branch'] = diff[b'branch']
+    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
+        meta[b'parent'] = diff[b'sourceControlBaseRevision']
+    return meta
 
 def readpatch(repo, drevs, write):
     """generate plain-text patch readable by 'hg import'
--- a/hgext/rebase.py	Mon May 20 10:08:28 2019 +0200
+++ b/hgext/rebase.py	Mon May 20 11:40:47 2019 -0400
@@ -108,7 +108,9 @@
 
 @revsetpredicate('_destautoorphanrebase')
 def _revsetdestautoorphanrebase(repo, subset, x):
-    """automatic rebase destination for a single orphan revision"""
+    # ``_destautoorphanrebase()``
+
+    # automatic rebase destination for a single orphan revision.
     unfi = repo.unfiltered()
     obsoleted = unfi.revs('obsolete()')
 
--- a/hgext/remotefilelog/__init__.py	Mon May 20 10:08:28 2019 +0200
+++ b/hgext/remotefilelog/__init__.py	Mon May 20 11:40:47 2019 -0400
@@ -293,6 +293,35 @@
     # debugdata needs remotefilelog.len to work
     extensions.wrapcommand(commands.table, 'debugdata', debugdatashallow)
 
+    changegroup.cgpacker = shallowbundle.shallowcg1packer
+
+    extensions.wrapfunction(changegroup, '_addchangegroupfiles',
+                            shallowbundle.addchangegroupfiles)
+    extensions.wrapfunction(
+        changegroup, 'makechangegroup', shallowbundle.makechangegroup)
+    extensions.wrapfunction(localrepo, 'makestore', storewrapper)
+    extensions.wrapfunction(exchange, 'pull', exchangepull)
+    extensions.wrapfunction(merge, 'applyupdates', applyupdates)
+    extensions.wrapfunction(merge, '_checkunknownfiles', checkunknownfiles)
+    extensions.wrapfunction(context.workingctx, '_checklookup', checklookup)
+    extensions.wrapfunction(scmutil, '_findrenames', findrenames)
+    extensions.wrapfunction(copies, '_computeforwardmissing',
+                            computeforwardmissing)
+    extensions.wrapfunction(dispatch, 'runcommand', runcommand)
+    extensions.wrapfunction(repair, '_collectbrokencsets', _collectbrokencsets)
+    extensions.wrapfunction(context.changectx, 'filectx', filectx)
+    extensions.wrapfunction(context.workingctx, 'filectx', workingfilectx)
+    extensions.wrapfunction(patch, 'trydiff', trydiff)
+    extensions.wrapfunction(hg, 'verify', _verify)
+    scmutil.fileprefetchhooks.add('remotefilelog', _fileprefetchhook)
+
+    # disappointing hacks below
+    scmutil.getrenamedfn = getrenamedfn
+    extensions.wrapfunction(revset, 'filelog', filelogrevset)
+    revset.symbols['filelog'] = revset.filelog
+    extensions.wrapfunction(cmdutil, 'walkfilerevs', walkfilerevs)
+
+
 def cloneshallow(orig, ui, repo, *args, **opts):
     if opts.get(r'shallow'):
         repos = []
@@ -405,6 +434,156 @@
     shallowrepo.wraprepo(repo)
     repo.store = shallowstore.wrapstore(repo.store)
 
+def storewrapper(orig, requirements, path, vfstype):
+    s = orig(requirements, path, vfstype)
+    if constants.SHALLOWREPO_REQUIREMENT in requirements:
+        s = shallowstore.wrapstore(s)
+
+    return s
+
+# prefetch files before update
+def applyupdates(orig, repo, actions, wctx, mctx, overwrite, labels=None):
+    if isenabled(repo):
+        manifest = mctx.manifest()
+        files = []
+        for f, args, msg in actions['g']:
+            files.append((f, hex(manifest[f])))
+        # batch fetch the needed files from the server
+        repo.fileservice.prefetch(files)
+    return orig(repo, actions, wctx, mctx, overwrite, labels=labels)
+
+# Prefetch merge checkunknownfiles
+def checkunknownfiles(orig, repo, wctx, mctx, force, actions,
+    *args, **kwargs):
+    if isenabled(repo):
+        files = []
+        sparsematch = repo.maybesparsematch(mctx.rev())
+        for f, (m, actionargs, msg) in actions.iteritems():
+            if sparsematch and not sparsematch(f):
+                continue
+            if m in ('c', 'dc', 'cm'):
+                files.append((f, hex(mctx.filenode(f))))
+            elif m == 'dg':
+                f2 = actionargs[0]
+                files.append((f2, hex(mctx.filenode(f2))))
+        # batch fetch the needed files from the server
+        repo.fileservice.prefetch(files)
+    return orig(repo, wctx, mctx, force, actions, *args, **kwargs)
+
+# Prefetch files before status attempts to look at their size and contents
+def checklookup(orig, self, files):
+    repo = self._repo
+    if isenabled(repo):
+        prefetchfiles = []
+        for parent in self._parents:
+            for f in files:
+                if f in parent:
+                    prefetchfiles.append((f, hex(parent.filenode(f))))
+        # batch fetch the needed files from the server
+        repo.fileservice.prefetch(prefetchfiles)
+    return orig(self, files)
+
+# Prefetch the logic that compares added and removed files for renames
+def findrenames(orig, repo, matcher, added, removed, *args, **kwargs):
+    if isenabled(repo):
+        files = []
+        pmf = repo['.'].manifest()
+        for f in removed:
+            if f in pmf:
+                files.append((f, hex(pmf[f])))
+        # batch fetch the needed files from the server
+        repo.fileservice.prefetch(files)
+    return orig(repo, matcher, added, removed, *args, **kwargs)
+
+# prefetch files before pathcopies check
+def computeforwardmissing(orig, a, b, match=None):
+    missing = orig(a, b, match=match)
+    repo = a._repo
+    if isenabled(repo):
+        mb = b.manifest()
+
+        files = []
+        sparsematch = repo.maybesparsematch(b.rev())
+        if sparsematch:
+            sparsemissing = set()
+            for f in missing:
+                if sparsematch(f):
+                    files.append((f, hex(mb[f])))
+                    sparsemissing.add(f)
+            missing = sparsemissing
+
+        # batch fetch the needed files from the server
+        repo.fileservice.prefetch(files)
+    return missing
+
+# close cache miss server connection after the command has finished
+def runcommand(orig, lui, repo, *args, **kwargs):
+    fileservice = None
+    # repo can be None when running in chg:
+    # - at startup, reposetup was called because serve is not norepo
+    # - a norepo command like "help" is called
+    if repo and isenabled(repo):
+        fileservice = repo.fileservice
+    try:
+        return orig(lui, repo, *args, **kwargs)
+    finally:
+        if fileservice:
+            fileservice.close()
+
+# prevent strip from stripping remotefilelogs
+def _collectbrokencsets(orig, repo, files, striprev):
+    if isenabled(repo):
+        files = list([f for f in files if not repo.shallowmatch(f)])
+    return orig(repo, files, striprev)
+
+# changectx wrappers
+def filectx(orig, self, path, fileid=None, filelog=None):
+    if fileid is None:
+        fileid = self.filenode(path)
+    if (isenabled(self._repo) and self._repo.shallowmatch(path)):
+        return remotefilectx.remotefilectx(self._repo, path, fileid=fileid,
+                                           changectx=self, filelog=filelog)
+    return orig(self, path, fileid=fileid, filelog=filelog)
+
+def workingfilectx(orig, self, path, filelog=None):
+    if (isenabled(self._repo) and self._repo.shallowmatch(path)):
+        return remotefilectx.remoteworkingfilectx(self._repo, path,
+                                                  workingctx=self,
+                                                  filelog=filelog)
+    return orig(self, path, filelog=filelog)
+
+# prefetch required revisions before a diff
+def trydiff(orig, repo, revs, ctx1, ctx2, modified, added, removed,
+    copy, getfilectx, *args, **kwargs):
+    if isenabled(repo):
+        prefetch = []
+        mf1 = ctx1.manifest()
+        for fname in modified + added + removed:
+            if fname in mf1:
+                fnode = getfilectx(fname, ctx1).filenode()
+                # fnode can be None if it's an edited working ctx file
+                if fnode:
+                    prefetch.append((fname, hex(fnode)))
+            if fname not in removed:
+                fnode = getfilectx(fname, ctx2).filenode()
+                if fnode:
+                    prefetch.append((fname, hex(fnode)))
+
+        repo.fileservice.prefetch(prefetch)
+
+    return orig(repo, revs, ctx1, ctx2, modified, added, removed, copy,
+                getfilectx, *args, **kwargs)
+
+# Prevent verify from processing files
+# a stub for mercurial.hg.verify()
+def _verify(orig, repo, level=None):
+    lock = repo.lock()
+    try:
+        return shallowverifier.shallowverifier(repo).verify()
+    finally:
+        lock.release()
+
+
 clientonetime = False
 def onetimeclientsetup(ui):
     global clientonetime
@@ -412,163 +591,6 @@
         return
     clientonetime = True
 
-    changegroup.cgpacker = shallowbundle.shallowcg1packer
-
-    extensions.wrapfunction(changegroup, '_addchangegroupfiles',
-                            shallowbundle.addchangegroupfiles)
-    extensions.wrapfunction(
-        changegroup, 'makechangegroup', shallowbundle.makechangegroup)
-
-    def storewrapper(orig, requirements, path, vfstype):
-        s = orig(requirements, path, vfstype)
-        if constants.SHALLOWREPO_REQUIREMENT in requirements:
-            s = shallowstore.wrapstore(s)
-
-        return s
-    extensions.wrapfunction(localrepo, 'makestore', storewrapper)
-
-    extensions.wrapfunction(exchange, 'pull', exchangepull)
-
-    # prefetch files before update
-    def applyupdates(orig, repo, actions, wctx, mctx, overwrite, labels=None):
-        if isenabled(repo):
-            manifest = mctx.manifest()
-            files = []
-            for f, args, msg in actions['g']:
-                files.append((f, hex(manifest[f])))
-            # batch fetch the needed files from the server
-            repo.fileservice.prefetch(files)
-        return orig(repo, actions, wctx, mctx, overwrite, labels=labels)
-    extensions.wrapfunction(merge, 'applyupdates', applyupdates)
-
-    # Prefetch merge checkunknownfiles
-    def checkunknownfiles(orig, repo, wctx, mctx, force, actions,
-                          *args, **kwargs):
-        if isenabled(repo):
-            files = []
-            sparsematch = repo.maybesparsematch(mctx.rev())
-            for f, (m, actionargs, msg) in actions.iteritems():
-                if sparsematch and not sparsematch(f):
-                    continue
-                if m in ('c', 'dc', 'cm'):
-                    files.append((f, hex(mctx.filenode(f))))
-                elif m == 'dg':
-                    f2 = actionargs[0]
-                    files.append((f2, hex(mctx.filenode(f2))))
-            # batch fetch the needed files from the server
-            repo.fileservice.prefetch(files)
-        return orig(repo, wctx, mctx, force, actions, *args, **kwargs)
-    extensions.wrapfunction(merge, '_checkunknownfiles', checkunknownfiles)
-
-    # Prefetch files before status attempts to look at their size and contents
-    def checklookup(orig, self, files):
-        repo = self._repo
-        if isenabled(repo):
-            prefetchfiles = []
-            for parent in self._parents:
-                for f in files:
-                    if f in parent:
-                        prefetchfiles.append((f, hex(parent.filenode(f))))
-            # batch fetch the needed files from the server
-            repo.fileservice.prefetch(prefetchfiles)
-        return orig(self, files)
-    extensions.wrapfunction(context.workingctx, '_checklookup', checklookup)
-
-    # Prefetch the logic that compares added and removed files for renames
-    def findrenames(orig, repo, matcher, added, removed, *args, **kwargs):
-        if isenabled(repo):
-            files = []
-            pmf = repo['.'].manifest()
-            for f in removed:
-                if f in pmf:
-                    files.append((f, hex(pmf[f])))
-            # batch fetch the needed files from the server
-            repo.fileservice.prefetch(files)
-        return orig(repo, matcher, added, removed, *args, **kwargs)
-    extensions.wrapfunction(scmutil, '_findrenames', findrenames)
-
-    # prefetch files before mergecopies check
-    def computenonoverlap(orig, repo, c1, c2, *args, **kwargs):
-        u1, u2 = orig(repo, c1, c2, *args, **kwargs)
-        if isenabled(repo):
-            m1 = c1.manifest()
-            m2 = c2.manifest()
-            files = []
-
-            sparsematch1 = repo.maybesparsematch(c1.rev())
-            if sparsematch1:
-                sparseu1 = set()
-                for f in u1:
-                    if sparsematch1(f):
-                        files.append((f, hex(m1[f])))
-                        sparseu1.add(f)
-                u1 = sparseu1
-
-            sparsematch2 = repo.maybesparsematch(c2.rev())
-            if sparsematch2:
-                sparseu2 = set()
-                for f in u2:
-                    if sparsematch2(f):
-                        files.append((f, hex(m2[f])))
-                        sparseu2.add(f)
-                u2 = sparseu2
-
-            # batch fetch the needed files from the server
-            repo.fileservice.prefetch(files)
-        return u1, u2
-    extensions.wrapfunction(copies, '_computenonoverlap', computenonoverlap)
-
-    # prefetch files before pathcopies check
-    def computeforwardmissing(orig, a, b, match=None):
-        missing = orig(a, b, match=match)
-        repo = a._repo
-        if isenabled(repo):
-            mb = b.manifest()
-
-            files = []
-            sparsematch = repo.maybesparsematch(b.rev())
-            if sparsematch:
-                sparsemissing = set()
-                for f in missing:
-                    if sparsematch(f):
-                        files.append((f, hex(mb[f])))
-                        sparsemissing.add(f)
-                missing = sparsemissing
-
-            # batch fetch the needed files from the server
-            repo.fileservice.prefetch(files)
-        return missing
-    extensions.wrapfunction(copies, '_computeforwardmissing',
-                            computeforwardmissing)
-
-    # close cache miss server connection after the command has finished
-    def runcommand(orig, lui, repo, *args, **kwargs):
-        fileservice = None
-        # repo can be None when running in chg:
-        # - at startup, reposetup was called because serve is not norepo
-        # - a norepo command like "help" is called
-        if repo and isenabled(repo):
-            fileservice = repo.fileservice
-        try:
-            return orig(lui, repo, *args, **kwargs)
-        finally:
-            if fileservice:
-                fileservice.close()
-    extensions.wrapfunction(dispatch, 'runcommand', runcommand)
-
-    # disappointing hacks below
-    scmutil.getrenamedfn = getrenamedfn
-    extensions.wrapfunction(revset, 'filelog', filelogrevset)
-    revset.symbols['filelog'] = revset.filelog
-    extensions.wrapfunction(cmdutil, 'walkfilerevs', walkfilerevs)
-
-    # prevent strip from stripping remotefilelogs
-    def _collectbrokencsets(orig, repo, files, striprev):
-        if isenabled(repo):
-            files = list([f for f in files if not repo.shallowmatch(f)])
-        return orig(repo, files, striprev)
-    extensions.wrapfunction(repair, '_collectbrokencsets', _collectbrokencsets)
-
     # Don't commit filelogs until we know the commit hash, since the hash
     # is present in the filelog blob.
     # This violates Mercurial's filelog->manifest->changelog write order,
@@ -611,59 +633,6 @@
         return node
     extensions.wrapfunction(changelog.changelog, 'add', changelogadd)
 
-    # changectx wrappers
-    def filectx(orig, self, path, fileid=None, filelog=None):
-        if fileid is None:
-            fileid = self.filenode(path)
-        if (isenabled(self._repo) and self._repo.shallowmatch(path)):
-            return remotefilectx.remotefilectx(self._repo, path,
-                fileid=fileid, changectx=self, filelog=filelog)
-        return orig(self, path, fileid=fileid, filelog=filelog)
-    extensions.wrapfunction(context.changectx, 'filectx', filectx)
-
-    def workingfilectx(orig, self, path, filelog=None):
-        if (isenabled(self._repo) and self._repo.shallowmatch(path)):
-            return remotefilectx.remoteworkingfilectx(self._repo,
-                path, workingctx=self, filelog=filelog)
-        return orig(self, path, filelog=filelog)
-    extensions.wrapfunction(context.workingctx, 'filectx', workingfilectx)
-
-    # prefetch required revisions before a diff
-    def trydiff(orig, repo, revs, ctx1, ctx2, modified, added, removed,
-                copy, getfilectx, *args, **kwargs):
-        if isenabled(repo):
-            prefetch = []
-            mf1 = ctx1.manifest()
-            for fname in modified + added + removed:
-                if fname in mf1:
-                    fnode = getfilectx(fname, ctx1).filenode()
-                    # fnode can be None if it's a edited working ctx file
-                    if fnode:
-                        prefetch.append((fname, hex(fnode)))
-                if fname not in removed:
-                    fnode = getfilectx(fname, ctx2).filenode()
-                    if fnode:
-                        prefetch.append((fname, hex(fnode)))
-
-            repo.fileservice.prefetch(prefetch)
-
-        return orig(repo, revs, ctx1, ctx2, modified, added, removed,
-            copy, getfilectx, *args, **kwargs)
-    extensions.wrapfunction(patch, 'trydiff', trydiff)
-
-    # Prevent verify from processing files
-    # a stub for mercurial.hg.verify()
-    def _verify(orig, repo):
-        lock = repo.lock()
-        try:
-            return shallowverifier.shallowverifier(repo).verify()
-        finally:
-            lock.release()
-
-    extensions.wrapfunction(hg, 'verify', _verify)
-
-    scmutil.fileprefetchhooks.add('remotefilelog', _fileprefetchhook)
-
 def getrenamedfn(repo, endrev=None):
     rcache = {}
 
--- a/hgext/remotefilelog/fileserverclient.py	Mon May 20 10:08:28 2019 +0200
+++ b/hgext/remotefilelog/fileserverclient.py	Mon May 20 11:40:47 2019 -0400
@@ -396,6 +396,9 @@
                                 batchdefault = 10
                             batchsize = self.ui.configint(
                                 'remotefilelog', 'batchsize', batchdefault)
+                            self.ui.debug(
+                                b'requesting %d files from '
+                                b'remotefilelog server...\n' % len(missed))
                             _getfilesbatch(
                                 remote, self.receivemissing, progress.increment,
                                 missed, idmap, batchsize)
--- a/hgext/share.py	Mon May 20 10:08:28 2019 +0200
+++ b/hgext/share.py	Mon May 20 11:40:47 2019 -0400
@@ -125,6 +125,10 @@
 
 def _hassharedbookmarks(repo):
     """Returns whether this repo has shared bookmarks"""
+    if bookmarks.bookmarksinstore(repo):
+        # Kind of a lie, but it means that we skip our custom reads and writes
+        # from/to the source repo.
+        return False
     try:
         shared = repo.vfs.read('shared').splitlines()
     except IOError as inst:
--- a/hgext/shelve.py	Mon May 20 10:08:28 2019 +0200
+++ b/hgext/shelve.py	Mon May 20 11:40:47 2019 -0400
@@ -912,7 +912,7 @@
     that causes a conflict. This reverts the unshelved changes, and
     leaves the bundle in place.)
 
-    If bare shelved change(when no files are specified, without interactive,
+    If bare shelved change (when no files are specified, without interactive,
     include and exclude option) was done on newly created branch it would
     restore branch information to the working directory.
 
--- a/mercurial/__init__.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/__init__.py	Mon May 20 11:40:47 2019 -0400
@@ -54,7 +54,16 @@
                 if finder == self:
                     continue
 
-                spec = finder.find_spec(fullname, path, target=target)
+                # Originally the API was a `find_module` method, but it was
+                # renamed to `find_spec` in python 3.4, with a new `target`
+                # argument.
+                find_spec_method = getattr(finder, 'find_spec', None)
+                if find_spec_method:
+                    spec = find_spec_method(fullname, path, target=target)
+                else:
+                    spec = finder.find_module(fullname)
+                    if spec is not None:
+                        spec = importlib.util.spec_from_loader(fullname, spec)
                 if spec:
                     break
 
--- a/mercurial/bookmarks.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/bookmarks.py	Mon May 20 11:40:47 2019 -0400
@@ -33,6 +33,14 @@
 # custom styles
 activebookmarklabel = 'bookmarks.active bookmarks.current'
 
+BOOKMARKS_IN_STORE_REQUIREMENT = 'bookmarksinstore'
+
+def bookmarksinstore(repo):
+    return BOOKMARKS_IN_STORE_REQUIREMENT in repo.requirements
+
+def bookmarksvfs(repo):
+    return repo.svfs if bookmarksinstore(repo) else repo.vfs
+
 def _getbkfile(repo):
     """Hook so that extensions that mess with the store can hook bm storage.
 
@@ -40,7 +48,7 @@
     bookmarks or the committed ones. Other extensions (like share)
     may need to tweak this behavior further.
     """
-    fp, pending = txnutil.trypending(repo.root, repo.vfs, 'bookmarks')
+    fp, pending = txnutil.trypending(repo.root, bookmarksvfs(repo), 'bookmarks')
     return fp
 
 class bmstore(object):
@@ -91,8 +99,11 @@
                         # ValueError:
                         # - node in nm, for non-20-bytes entry
                         # - split(...), for string without ' '
-                        repo.ui.warn(_('malformed line in .hg/bookmarks: %r\n')
-                                     % pycompat.bytestr(line))
+                        bookmarkspath = '.hg/bookmarks'
+                        if bookmarksinstore(repo):
+                            bookmarkspath = '.hg/store/bookmarks'
+                        repo.ui.warn(_('malformed line in %s: %r\n')
+                                     % (bookmarkspath, pycompat.bytestr(line)))
         except IOError as inst:
             if inst.errno != errno.ENOENT:
                 raise
@@ -192,8 +203,9 @@
         """record that bookmarks have been changed in a transaction
 
         The transaction is then responsible for updating the file content."""
+        location = '' if bookmarksinstore(self._repo) else 'plain'
         tr.addfilegenerator('bookmarks', ('bookmarks',), self._write,
-                            location='plain')
+                            location=location)
         tr.hookargs['bookmark_moved'] = '1'
 
     def _writerepo(self, repo):
@@ -203,28 +215,24 @@
             rbm.active = None
             rbm._writeactive()
 
-        with repo.wlock():
-            file_ = repo.vfs('bookmarks', 'w', atomictemp=True,
-                             checkambig=True)
-            try:
-                self._write(file_)
-            except: # re-raises
-                file_.discard()
-                raise
-            finally:
-                file_.close()
+        if bookmarksinstore(repo):
+            vfs = repo.svfs
+            lock = repo.lock()
+        else:
+            vfs = repo.vfs
+            lock = repo.wlock()
+        with lock:
+            with vfs('bookmarks', 'w', atomictemp=True, checkambig=True) as f:
+                self._write(f)
 
     def _writeactive(self):
         if self._aclean:
             return
         with self._repo.wlock():
             if self._active is not None:
-                f = self._repo.vfs('bookmarks.current', 'w', atomictemp=True,
-                                   checkambig=True)
-                try:
+                with self._repo.vfs('bookmarks.current', 'w', atomictemp=True,
+                                   checkambig=True) as f:
                     f.write(encoding.fromlocal(self._active))
-                finally:
-                    f.close()
             else:
                 self._repo.vfs.tryunlink('bookmarks.current')
         self._aclean = True
@@ -306,28 +314,12 @@
     itself as we commit. This function returns the name of that bookmark.
     It is stored in .hg/bookmarks.current
     """
-    try:
-        file = repo.vfs('bookmarks.current')
-    except IOError as inst:
-        if inst.errno != errno.ENOENT:
-            raise
-        return None
-    try:
-        # No readline() in osutil.posixfile, reading everything is
-        # cheap.
-        # Note that it's possible for readlines() here to raise
-        # IOError, since we might be reading the active mark over
-        # static-http which only tries to load the file when we try
-        # to read from it.
-        mark = encoding.tolocal((file.readlines() or [''])[0])
-        if mark == '' or mark not in marks:
-            mark = None
-    except IOError as inst:
-        if inst.errno != errno.ENOENT:
-            raise
-        return None
-    finally:
-        file.close()
+    # No readline() in osutil.posixfile, reading everything is
+    # cheap.
+    content = repo.vfs.tryread('bookmarks.current')
+    mark = encoding.tolocal((content.splitlines() or [''])[0])
+    if mark == '' or mark not in marks:
+        mark = None
     return mark
 
 def activate(repo, mark):
@@ -453,7 +445,11 @@
     return d
 
 def pushbookmark(repo, key, old, new):
-    with repo.wlock(), repo.lock(), repo.transaction('bookmarks') as tr:
+    if bookmarksinstore(repo):
+        wlock = util.nullcontextmanager()
+    else:
+        wlock = repo.wlock()
+    with wlock, repo.lock(), repo.transaction('bookmarks') as tr:
         marks = repo._bookmarks
         existing = hex(marks.get(key, ''))
         if existing != old and existing != new:
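
The new ``bookmarksinstore()``/``bookmarksvfs()`` helpers above let callers pick
the right location without repeating the requirement check. A hedged sketch of
how outside code could read the raw bookmarks file either way (the function
name is hypothetical; ``tryread`` is the existing vfs API)::

  from mercurial import bookmarks

  def readrawbookmarks(repo):
      # repo.svfs when the 'bookmarksinstore' requirement is set, else repo.vfs
      vfs = bookmarks.bookmarksvfs(repo)
      return vfs.tryread('bookmarks').splitlines()
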
--- a/mercurial/branchmap.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/branchmap.py	Mon May 20 11:40:47 2019 -0400
@@ -378,6 +378,10 @@
         # fetch current topological heads to speed up filtering
         topoheads = set(cl.headrevs())
 
+        # new tip revision which we found after iterating items from new
+        # branches
+        ntiprev = self.tiprev
+
         # if older branchheads are reachable from new ones, they aren't
         # really branchheads. Note checking parents is insufficient:
         # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
@@ -401,9 +405,12 @@
             bheadrevs = sorted(bheadset)
             self[branch] = [cl.node(rev) for rev in bheadrevs]
             tiprev = bheadrevs[-1]
-            if tiprev > self.tiprev:
-                self.tipnode = cl.node(tiprev)
-                self.tiprev = tiprev
+            if tiprev > ntiprev:
+                ntiprev = tiprev
+
+        if ntiprev > self.tiprev:
+            self.tiprev = ntiprev
+            self.tipnode = cl.node(ntiprev)
 
         if not self.validfor(repo):
             # cache key are not valid anymore
@@ -608,51 +615,59 @@
         wlock = None
         step = ''
         try:
+            # write the new names
             if self._rbcnamescount < len(self._names):
-                step = ' names'
                 wlock = repo.wlock(wait=False)
-                if self._rbcnamescount != 0:
-                    f = repo.cachevfs.open(_rbcnames, 'ab')
-                    if f.tell() == self._rbcsnameslen:
-                        f.write('\0')
-                    else:
-                        f.close()
-                        repo.ui.debug("%s changed - rewriting it\n" % _rbcnames)
-                        self._rbcnamescount = 0
-                        self._rbcrevslen = 0
-                if self._rbcnamescount == 0:
-                    # before rewriting names, make sure references are removed
-                    repo.cachevfs.unlinkpath(_rbcrevs, ignoremissing=True)
-                    f = repo.cachevfs.open(_rbcnames, 'wb')
-                f.write('\0'.join(encoding.fromlocal(b)
-                                  for b in self._names[self._rbcnamescount:]))
-                self._rbcsnameslen = f.tell()
-                f.close()
-                self._rbcnamescount = len(self._names)
+                step = ' names'
+                self._writenames(repo)
 
+            # write the new revs
             start = self._rbcrevslen * _rbcrecsize
             if start != len(self._rbcrevs):
                 step = ''
                 if wlock is None:
                     wlock = repo.wlock(wait=False)
-                revs = min(len(repo.changelog),
-                           len(self._rbcrevs) // _rbcrecsize)
-                f = repo.cachevfs.open(_rbcrevs, 'ab')
-                if f.tell() != start:
-                    repo.ui.debug("truncating cache/%s to %d\n"
-                                  % (_rbcrevs, start))
-                    f.seek(start)
-                    if f.tell() != start:
-                        start = 0
-                        f.seek(start)
-                    f.truncate()
-                end = revs * _rbcrecsize
-                f.write(self._rbcrevs[start:end])
-                f.close()
-                self._rbcrevslen = revs
+                self._writerevs(repo, start)
+
         except (IOError, OSError, error.Abort, error.LockError) as inst:
             repo.ui.debug("couldn't write revision branch cache%s: %s\n"
                           % (step, stringutil.forcebytestr(inst)))
         finally:
             if wlock is not None:
                 wlock.release()
+
+    def _writenames(self, repo):
+        """ write the new branch names to revbranchcache """
+        if self._rbcnamescount != 0:
+            f = repo.cachevfs.open(_rbcnames, 'ab')
+            if f.tell() == self._rbcsnameslen:
+                f.write('\0')
+            else:
+                f.close()
+                repo.ui.debug("%s changed - rewriting it\n" % _rbcnames)
+                self._rbcnamescount = 0
+                self._rbcrevslen = 0
+        if self._rbcnamescount == 0:
+            # before rewriting names, make sure references are removed
+            repo.cachevfs.unlinkpath(_rbcrevs, ignoremissing=True)
+            f = repo.cachevfs.open(_rbcnames, 'wb')
+        f.write('\0'.join(encoding.fromlocal(b)
+                          for b in self._names[self._rbcnamescount:]))
+        self._rbcsnameslen = f.tell()
+        f.close()
+        self._rbcnamescount = len(self._names)
+
+    def _writerevs(self, repo, start):
+        """ write the new revs to revbranchcache """
+        revs = min(len(repo.changelog), len(self._rbcrevs) // _rbcrecsize)
+        with repo.cachevfs.open(_rbcrevs, 'ab') as f:
+            if f.tell() != start:
+                repo.ui.debug("truncating cache/%s to %d\n" % (_rbcrevs, start))
+                f.seek(start)
+                if f.tell() != start:
+                    start = 0
+                    f.seek(start)
+                f.truncate()
+            end = revs * _rbcrecsize
+            f.write(self._rbcrevs[start:end])
+        self._rbcrevslen = revs
--- a/mercurial/cext/dirs.c	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/cext/dirs.c	Mon May 20 11:40:47 2019 -0400
@@ -42,6 +42,9 @@
 			break;
 		pos -= 1;
 	}
+	if (pos == -1) {
+	  return 0;
+	}
 
 	return pos;
 }
--- a/mercurial/cext/parsers.c	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/cext/parsers.c	Mon May 20 11:40:47 2019 -0400
@@ -667,10 +667,11 @@
 void manifest_module_init(PyObject *mod);
 void revlog_module_init(PyObject *mod);
 
-static const int version = 12;
+static const int version = 13;
 
 static void module_init(PyObject *mod)
 {
+	PyObject *capsule = NULL;
 	PyModule_AddIntConstant(mod, "version", version);
 
 	/* This module constant has two purposes.  First, it lets us unit test
@@ -687,6 +688,12 @@
 	manifest_module_init(mod);
 	revlog_module_init(mod);
 
+	capsule = PyCapsule_New(
+	    make_dirstate_tuple,
+	    "mercurial.cext.parsers.make_dirstate_tuple_CAPI", NULL);
+	if (capsule != NULL)
+		PyModule_AddObject(mod, "make_dirstate_tuple_CAPI", capsule);
+
 	if (PyType_Ready(&dirstateTupleType) < 0) {
 		return;
 	}
--- a/mercurial/changegroup.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/changegroup.py	Mon May 20 11:40:47 2019 -0400
@@ -1061,7 +1061,7 @@
         while tmfnodes:
             tree, nodes = tmfnodes.popitem()
 
-            should_visit = self._matcher.visitdir(tree[:-1] or '.')
+            should_visit = self._matcher.visitdir(tree[:-1])
             if tree and not should_visit:
                 continue
 
@@ -1093,7 +1093,7 @@
                 fullclnodes=self._fullclnodes,
                 precomputedellipsis=self._precomputedellipsis)
 
-            if not self._oldmatcher.visitdir(store.tree[:-1] or '.'):
+            if not self._oldmatcher.visitdir(store.tree[:-1]):
                 yield tree, deltas
             else:
                 # 'deltas' is a generator and we need to consume it even if
--- a/mercurial/changelog.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/changelog.py	Mon May 20 11:40:47 2019 -0400
@@ -194,6 +194,8 @@
     user = attr.ib(default='')
     date = attr.ib(default=(0, 0))
     files = attr.ib(default=attr.Factory(list))
+    p1copies = attr.ib(default=None)
+    p2copies = attr.ib(default=None)
     description = attr.ib(default='')
 
 class changelogrevision(object):
@@ -591,11 +593,11 @@
             elif branch in (".", "null", "tip"):
                 raise error.StorageError(_('the name \'%s\' is reserved')
                                          % branch)
-        if (p1copies or p2copies) and extra is None:
+        if (p1copies is not None or p2copies is not None) and extra is None:
             extra = {}
-        if p1copies:
+        if p1copies is not None:
             extra['p1copies'] = encodecopies(p1copies)
-        if p2copies:
+        if p2copies is not None:
             extra['p2copies'] = encodecopies(p2copies)
 
         if extra:
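
The changelog hunk above distinguishes copy information that is absent
(``None``) from copy information that was recorded but is empty (``{}``). A
hedged illustration of the effect on ``extra`` (other arguments elided)::

  changelog.add(..., p1copies=None)            # no 'p1copies' key in extra
  changelog.add(..., p1copies={})              # extra['p1copies'] stored, empty
  changelog.add(..., p1copies={'dst': 'src'})  # copy recorded as before
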
--- a/mercurial/cmdutil.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/cmdutil.py	Mon May 20 11:40:47 2019 -0400
@@ -278,8 +278,8 @@
         force = opts.get('force')
         if not force:
             vdirs = []
+            match = matchmod.badmatch(match, fail)
             match.explicitdir = vdirs.append
-            match.bad = fail
 
         status = repo.status(match=match)
 
--- a/mercurial/commands.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/commands.py	Mon May 20 11:40:47 2019 -0400
@@ -63,6 +63,7 @@
     tags as tagsmod,
     ui as uimod,
     util,
+    verify as verifymod,
     wireprotoserver,
 )
 from .utils import (
@@ -1674,6 +1675,8 @@
 
         if not bheads:
             raise error.Abort(_('can only close branch heads'))
+        elif branch == repo['.'].branch() and repo['.'].node() not in bheads:
+            raise error.Abort(_('can only close branch heads'))
         elif opts.get('amend'):
             if (repo['.'].p1().branch() != branch and
                 repo['.'].p2().branch() != branch):
@@ -1732,6 +1735,10 @@
 
     cmdutil.commitstatus(repo, node, branch, bheads, opts)
 
+    if not ui.quiet and ui.configbool('commands', 'commit.post-status'):
+        status(ui, repo, modified=True, added=True, removed=True, deleted=True,
+               unknown=True, subrepos=opts.get('subrepos'))
+
 @command('config|showconfig|debugconfig',
     [('u', 'untrusted', None, _('show untrusted configuration options')),
      ('e', 'edit', None, _('edit user config')),
@@ -3715,7 +3722,8 @@
      _('follow line range of specified file (EXPERIMENTAL)'),
      _('FILE,RANGE')),
     ('', 'removed', None, _('include revisions where files were removed')),
-    ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
+    ('m', 'only-merges', None,
+     _('show only merges (DEPRECATED) (use -r "merge()" instead)')),
     ('u', 'user', [], _('revisions committed by user'), _('USER')),
     ('', 'only-branch', [],
      _('show only changesets within the given named branch (DEPRECATED)'),
@@ -4672,7 +4680,7 @@
     """
     ret = repo.recover()
     if ret:
-        if opts['verify']:
+        if opts[r'verify']:
             return hg.verify(repo)
         else:
             msg = _("(verify step skipped, run  `hg verify` to check your "
@@ -6147,8 +6155,10 @@
                 ui.warn("(%s)\n" % obsfatemsg)
         return ret
 
-@command('verify', [], helpcategory=command.CATEGORY_MAINTENANCE)
-def verify(ui, repo):
+@command('verify',
+         [('', 'full', False, 'perform more checks (EXPERIMENTAL)')],
+         helpcategory=command.CATEGORY_MAINTENANCE)
+def verify(ui, repo, **opts):
     """verify the integrity of the repository
 
     Verify the integrity of the current repository.
@@ -6164,7 +6174,12 @@
 
     Returns 0 on success, 1 if errors are encountered.
     """
-    return hg.verify(repo)
+    opts = pycompat.byteskwargs(opts)
+
+    level = None
+    if opts['full']:
+        level = verifymod.VERIFY_FULL
+    return hg.verify(repo, level)
 
 @command(
     'version', [] + formatteropts, helpcategory=command.CATEGORY_HELP,
@@ -6233,16 +6248,6 @@
 def loadcmdtable(ui, name, cmdtable):
     """Load command functions from specified cmdtable
     """
-    cmdtable = cmdtable.copy()
-    for cmd in list(cmdtable):
-        if not cmd.startswith('^'):
-            continue
-        ui.deprecwarn("old-style command registration '%s' in extension '%s'"
-                      % (cmd, name), '4.8')
-        entry = cmdtable.pop(cmd)
-        entry[0].helpbasic = True
-        cmdtable[cmd[1:]] = entry
-
     overrides = [cmd for cmd in cmdtable if cmd in table]
     if overrides:
         ui.warn(_("extension '%s' overrides commands: %s\n")
--- a/mercurial/configitems.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/configitems.py	Mon May 20 11:40:47 2019 -0400
@@ -202,6 +202,9 @@
     default=dynamicdefault,
 )
 _registerdiffopts(section='commands', configprefix='commit.interactive.')
+coreconfigitem('commands', 'commit.post-status',
+    default=False,
+)
 coreconfigitem('commands', 'grep.all-files',
     default=False,
 )
@@ -526,12 +529,22 @@
 coreconfigitem('experimental', 'evolution.bundle-obsmarker',
     default=False,
 )
+coreconfigitem('experimental', 'log.topo',
+    default=False,
+)
 coreconfigitem('experimental', 'evolution.report-instabilities',
     default=True,
 )
 coreconfigitem('experimental', 'evolution.track-operation',
     default=True,
 )
+# repo-level config to exclude a revset from visibility
+#
+# The target use case is to use `share` to expose different subsets of the same
+# repository, especially on the server side. See also `server.view`.
+coreconfigitem('experimental', 'extra-filter-revs',
+    default=None,
+)
 coreconfigitem('experimental', 'maxdeltachainspan',
     default=-1,
 )
@@ -663,6 +676,9 @@
     default=None,
     generic=True,
 )
+coreconfigitem('format', 'bookmarks-in-store',
+    default=False,
+)
 coreconfigitem('format', 'chunkcachesize',
     default=None,
 )
--- a/mercurial/context.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/context.py	Mon May 20 11:40:47 2019 -0400
@@ -272,6 +272,30 @@
         except error.LookupError:
             return ''
 
+    @propertycache
+    def _copies(self):
+        p1copies = {}
+        p2copies = {}
+        p1 = self.p1()
+        p2 = self.p2()
+        narrowmatch = self._repo.narrowmatch()
+        for dst in self.files():
+            if not narrowmatch(dst) or dst not in self:
+                continue
+            copied = self[dst].renamed()
+            if not copied:
+                continue
+            src, srcnode = copied
+            if src in p1 and p1[src].filenode() == srcnode:
+                p1copies[dst] = src
+            elif src in p2 and p2[src].filenode() == srcnode:
+                p2copies[dst] = src
+        return p1copies, p2copies
+    def p1copies(self):
+        return self._copies[0]
+    def p2copies(self):
+        return self._copies[1]
+
     def sub(self, path, allowcreate=True):
         '''return a subrepo for the stored revision of path, never wdir()'''
         return subrepo.subrepo(self, path, allowcreate=allowcreate)
@@ -456,27 +480,7 @@
         # Otherwise (config said to read only from filelog, or we are in
         # compatibility mode and there is no data in the changeset), we get
         # the copy metadata from the filelogs.
-        p1copies = {}
-        p2copies = {}
-        p1 = self.p1()
-        p2 = self.p2()
-        narrowmatch = self._repo.narrowmatch()
-        for dst in self.files():
-            if not narrowmatch(dst) or dst not in self:
-                continue
-            copied = self[dst].renamed()
-            if not copied:
-                continue
-            src, srcnode = copied
-            if src in p1 and p1[src].filenode() == srcnode:
-                p1copies[dst] = src
-            elif src in p2 and p2[src].filenode() == srcnode:
-                p2copies[dst] = src
-        return p1copies, p2copies
-    def p1copies(self):
-        return self._copies[0]
-    def p2copies(self):
-        return self._copies[1]
+        return super(changectx, self)._copies
     def description(self):
         return self._changeset.description
     def branch(self):
@@ -1098,7 +1102,7 @@
     """A committablectx object provides common functionality for a context that
     wants the ability to commit, e.g. workingctx or memctx."""
     def __init__(self, repo, text="", user=None, date=None, extra=None,
-                 changes=None):
+                 changes=None, branch=None):
         super(committablectx, self).__init__(repo)
         self._rev = None
         self._node = None
@@ -1113,13 +1117,9 @@
         self._extra = {}
         if extra:
             self._extra = extra.copy()
-        if 'branch' not in self._extra:
-            try:
-                branch = encoding.fromlocal(self._repo.dirstate.branch())
-            except UnicodeDecodeError:
-                raise error.Abort(_('branch name not in UTF-8!'))
-            self._extra['branch'] = branch
-        if self._extra['branch'] == '':
+        if branch is not None:
+            self._extra['branch'] = encoding.fromlocal(branch)
+        if not self._extra.get('branch'):
             self._extra['branch'] = 'default'
 
     def __bytes__(self):
@@ -1132,42 +1132,6 @@
 
     __bool__ = __nonzero__
 
-    def _buildflagfunc(self):
-        # Create a fallback function for getting file flags when the
-        # filesystem doesn't support them
-
-        copiesget = self._repo.dirstate.copies().get
-        parents = self.parents()
-        if len(parents) < 2:
-            # when we have one parent, it's easy: copy from parent
-            man = parents[0].manifest()
-            def func(f):
-                f = copiesget(f, f)
-                return man.flags(f)
-        else:
-            # merges are tricky: we try to reconstruct the unstored
-            # result from the merge (issue1802)
-            p1, p2 = parents
-            pa = p1.ancestor(p2)
-            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
-
-            def func(f):
-                f = copiesget(f, f) # may be wrong for merges with copies
-                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
-                if fl1 == fl2:
-                    return fl1
-                if fl1 == fla:
-                    return fl2
-                if fl2 == fla:
-                    return fl1
-                return '' # punt for conflicts
-
-        return func
-
-    @propertycache
-    def _flagfunc(self):
-        return self._repo.dirstate.flagfunc(self._buildflagfunc)
-
     @propertycache
     def _status(self):
         return self._repo.status()
@@ -1206,26 +1170,6 @@
         return self._status.removed
     def deleted(self):
         return self._status.deleted
-    @propertycache
-    def _copies(self):
-        p1copies = {}
-        p2copies = {}
-        parents = self._repo.dirstate.parents()
-        p1manifest = self._repo[parents[0]].manifest()
-        p2manifest = self._repo[parents[1]].manifest()
-        narrowmatch = self._repo.narrowmatch()
-        for dst, src in self._repo.dirstate.copies().items():
-            if not narrowmatch(dst):
-                continue
-            if src in p1manifest:
-                p1copies[dst] = src
-            elif src in p2manifest:
-                p2copies[dst] = src
-        return p1copies, p2copies
-    def p1copies(self):
-        return self._copies[0]
-    def p2copies(self):
-        return self._copies[1]
     def branch(self):
         return encoding.tolocal(self._extra['branch'])
     def closesbranch(self):
@@ -1257,33 +1201,10 @@
     def children(self):
         return []
 
-    def flags(self, path):
-        if r'_manifest' in self.__dict__:
-            try:
-                return self._manifest.flags(path)
-            except KeyError:
-                return ''
-
-        try:
-            return self._flagfunc(path)
-        except OSError:
-            return ''
-
     def ancestor(self, c2):
         """return the "best" ancestor context of self and c2"""
         return self._parents[0].ancestor(c2) # punt on two parents for now
 
-    def walk(self, match):
-        '''Generates matching file names.'''
-        return sorted(self._repo.dirstate.walk(self._repo.narrowmatch(match),
-                                               subrepos=sorted(self.substate),
-                                               unknown=True, ignored=False))
-
-    def matches(self, match):
-        match = self._repo.narrowmatch(match)
-        ds = self._repo.dirstate
-        return sorted(f for f in ds.matches(match) if ds[f] != 'r')
-
     def ancestors(self):
         for p in self._parents:
             yield p
@@ -1301,18 +1222,6 @@
 
         """
 
-        with self._repo.dirstate.parentchange():
-            for f in self.modified() + self.added():
-                self._repo.dirstate.normal(f)
-            for f in self.removed():
-                self._repo.dirstate.drop(f)
-            self._repo.dirstate.setparents(node)
-
-        # write changes out explicitly, because nesting wlock at
-        # runtime may prevent 'wlock.release()' in 'repo.commit()'
-        # from immediately doing so for subsequent changing files
-        self._repo.dirstate.write(self._repo.currenttransaction())
-
     def dirty(self, missing=False, merge=True, branch=True):
         return False
 
@@ -1327,7 +1236,14 @@
     """
     def __init__(self, repo, text="", user=None, date=None, extra=None,
                  changes=None):
-        super(workingctx, self).__init__(repo, text, user, date, extra, changes)
+        branch = None
+        if not extra or 'branch' not in extra:
+            try:
+                branch = repo.dirstate.branch()
+            except UnicodeDecodeError:
+                raise error.Abort(_('branch name not in UTF-8!'))
+        super(workingctx, self).__init__(repo, text, user, date, extra, changes,
+                                         branch=branch)
 
     def __iter__(self):
         d = self._repo.dirstate
@@ -1355,6 +1271,54 @@
         self._manifest
         return super(workingctx, self)._fileinfo(path)
 
+    def _buildflagfunc(self):
+        # Create a fallback function for getting file flags when the
+        # filesystem doesn't support them
+
+        copiesget = self._repo.dirstate.copies().get
+        parents = self.parents()
+        if len(parents) < 2:
+            # when we have one parent, it's easy: copy from parent
+            man = parents[0].manifest()
+            def func(f):
+                f = copiesget(f, f)
+                return man.flags(f)
+        else:
+            # merges are tricky: we try to reconstruct the unstored
+            # result from the merge (issue1802)
+            p1, p2 = parents
+            pa = p1.ancestor(p2)
+            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
+
+            def func(f):
+                f = copiesget(f, f) # may be wrong for merges with copies
+                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
+                if fl1 == fl2:
+                    return fl1
+                if fl1 == fla:
+                    return fl2
+                if fl2 == fla:
+                    return fl1
+                return '' # punt for conflicts
+
+        return func
+
+    @propertycache
+    def _flagfunc(self):
+        return self._repo.dirstate.flagfunc(self._buildflagfunc)
+
+    def flags(self, path):
+        if r'_manifest' in self.__dict__:
+            try:
+                return self._manifest.flags(path)
+            except KeyError:
+                return ''
+
+        try:
+            return self._flagfunc(path)
+        except OSError:
+            return ''
+
     def filectx(self, path, filelog=None):
         """get a file context from the working directory"""
         return workingfilectx(self._repo, path, workingctx=self,
@@ -1579,6 +1543,27 @@
         return s
 
     @propertycache
+    def _copies(self):
+        p1copies = {}
+        p2copies = {}
+        parents = self._repo.dirstate.parents()
+        p1manifest = self._repo[parents[0]].manifest()
+        p2manifest = self._repo[parents[1]].manifest()
+        narrowmatch = self._repo.narrowmatch()
+        for dst, src in self._repo.dirstate.copies().items():
+            if not narrowmatch(dst):
+                continue
+            if src in p1manifest:
+                p1copies[dst] = src
+            elif src in p2manifest:
+                p2copies[dst] = src
+        return p1copies, p2copies
+    def p1copies(self):
+        return self._copies[0]
+    def p2copies(self):
+        return self._copies[1]
+
+    @propertycache
     def _manifest(self):
         """generate a manifest corresponding to the values in self._status
 
@@ -1651,8 +1636,29 @@
             match.bad = bad
         return match
 
+    def walk(self, match):
+        '''Generates matching file names.'''
+        return sorted(self._repo.dirstate.walk(self._repo.narrowmatch(match),
+                                               subrepos=sorted(self.substate),
+                                               unknown=True, ignored=False))
+
+    def matches(self, match):
+        match = self._repo.narrowmatch(match)
+        ds = self._repo.dirstate
+        return sorted(f for f in ds.matches(match) if ds[f] != 'r')
+
     def markcommitted(self, node):
-        super(workingctx, self).markcommitted(node)
+        with self._repo.dirstate.parentchange():
+            for f in self.modified() + self.added():
+                self._repo.dirstate.normal(f)
+            for f in self.removed():
+                self._repo.dirstate.drop(f)
+            self._repo.dirstate.setparents(node)
+
+        # write changes out explicitly, because nesting wlock at
+        # runtime may prevent 'wlock.release()' in 'repo.commit()'
+        # from immediately doing so for subsequent changing files
+        self._repo.dirstate.write(self._repo.currenttransaction())
 
         sparse.aftercommit(self._repo, node)
 
@@ -1767,8 +1773,7 @@
 
     def markcopied(self, src):
         """marks this file a copy of `src`"""
-        if self._repo.dirstate[self._path] in "nma":
-            self._repo.dirstate.copy(src, self._path)
+        self._repo.dirstate.copy(src, self._path)
 
     def clearunknown(self):
         """Removes conflicting items in the working directory so that
@@ -1913,7 +1918,7 @@
         if self.isdirty(path):
             return self._cache[path]['copied']
         else:
-            raise error.ProgrammingError('copydata() called on clean context')
+            return None
 
     def flags(self, path):
         if self.isdirty(path):
@@ -2055,7 +2060,7 @@
         else:
             parents = (self._repo[parents[0]], self._repo[parents[1]])
 
-        files = self._cache.keys()
+        files = self.files()
         def getfile(repo, memctx, path):
             if self._cache[path]['exists']:
                 return memfilectx(repo, memctx, path,
@@ -2305,7 +2310,8 @@
 
     def __init__(self, repo, parents, text, files, filectxfn, user=None,
                  date=None, extra=None, branch=None, editor=False):
-        super(memctx, self).__init__(repo, text, user, date, extra)
+        super(memctx, self).__init__(repo, text, user, date, extra,
+                                     branch=branch)
         self._rev = None
         self._node = None
         parents = [(p or nullid) for p in parents]
@@ -2313,8 +2319,6 @@
         self._parents = [self._repo[p] for p in (p1, p2)]
         files = sorted(set(files))
         self._files = files
-        if branch is not None:
-            self._extra['branch'] = encoding.fromlocal(branch)
         self.substate = {}
 
         if isinstance(filectxfn, patch.filestore):
--- a/mercurial/copies.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/copies.py	Mon May 20 11:40:47 2019 -0400
@@ -108,39 +108,56 @@
     return min(limit, a, b)
 
 def _chain(src, dst, a, b):
-    """chain two sets of copies a->b"""
+    """chain two sets of copies 'a' and 'b'"""
+
+    # When chaining copies in 'a' (from 'src' via some other commit 'mid') with
+    # copies in 'b' (from 'mid' to 'dst'), we can get the different cases in the
+    # following table (not including trivial cases). For example, case 6 is
+    # where a file existed in 'src' and remained under that name in 'mid' and
+    # then was renamed between 'mid' and 'dst'.
+    #
+    # case src mid dst result
+    #   1   x   y   -    -
+    #   2   x   y   y   x->y
+    #   3   x   y   x    -
+    #   4   x   y   z   x->z
+    #   5   -   x   y    -
+    #   6   x   x   y   x->y
+
+    # Initialize result ('t') from 'a'. This catches cases 1 & 2. We'll remove
+    # case 1 later. We'll also catch cases 3 & 4 here. Case 4 will be
+    # overwritten later, and case 3 will be removed later.
     t = a.copy()
     for k, v in b.iteritems():
         if v in t:
-            # found a chain
-            if t[v] != k:
-                # file wasn't renamed back to itself
-                t[k] = t[v]
-            if v not in dst:
-                # chain was a rename, not a copy
-                del t[v]
-        if v in src:
-            # file is a copy of an existing file
+            # Found a chain, i.e. cases 3 & 4. We'll remove case 3 later.
+            t[k] = t[v]
+        else:
+            # Renamed only in 'b', i.e. cases 5 & 6. We'll remove case 5 later.
             t[k] = v
 
     for k, v in list(t.items()):
-        # remove criss-crossed copies
-        if k in src and v in dst:
+        # remove copies from files that didn't exist, i.e. case 5
+        if v not in src:
             del t[k]
-        # remove copies to files that were then removed
+        # remove criss-crossed copies, i.e. case 3
+        elif k in src and v in dst:
+            del t[k]
+        # remove copies to files that were then removed, i.e. case 1
+        # and file 'y' in cases 3 & 4 (in case of rename)
         elif k not in dst:
             del t[k]
 
     return t
 
-def _tracefile(fctx, am, limit=node.nullrev):
+def _tracefile(fctx, am, limit):
     """return file context that is the ancestor of fctx present in ancestor
     manifest am, stopping after the first ancestor lower than limit"""
 
     for f in fctx.ancestors():
         if am.get(f.path(), None) == f.filenode():
             return f
-        if limit >= 0 and not f.isintroducedafter(limit):
+        if not f.isintroducedafter(limit):
             return None
 
 def _dirstatecopies(repo, match=None):
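
The rewritten ``_chain()`` above can be exercised outside Mercurial. The following is a minimal sketch, not part of the changeset: plain sets stand in for the ``src``/``dst`` manifests and the helper name is invented, but the chaining and filtering mirror the new code and cover a few rows of the case table::

   def chain_copies(src, dst, a, b):
       # src, dst: sets of files present in the two endpoints
       # a: copies from 'src' to 'mid', b: copies from 'mid' to 'dst'
       # (both map destination name -> source name)
       t = dict(a)
       for k, v in b.items():
           if v in t:
               t[k] = t[v]      # chained through 'mid' (cases 3 & 4)
           else:
               t[k] = v         # copied only on the second leg (cases 5 & 6)
       for k, v in list(t.items()):
           if v not in src:             # case 5: source never existed in 'src'
               del t[k]
           elif k in src and v in dst:  # case 3: criss-crossed copy
               del t[k]
           elif k not in dst:           # case 1: destination was removed again
               del t[k]
       return t

   # case 6: 'x' kept its name in 'mid', then was renamed to 'y'
   print(chain_copies({'x'}, {'y'}, {}, {'y': 'x'}))          # {'y': 'x'}
   # case 4: renamed twice, x -> y -> z
   print(chain_copies({'x'}, {'z'}, {'y': 'x'}, {'z': 'y'}))  # {'z': 'x'}
   # case 3: renamed x -> y and back to x
   print(chain_copies({'x'}, {'x'}, {'y': 'x'}, {'x': 'y'}))  # {}
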
@@ -204,9 +221,9 @@
     ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True)
 
     if debug:
-        dbg('debug.copies:      missing file to search: %d\n' % len(missing))
+        dbg('debug.copies:      missing files to search: %d\n' % len(missing))
 
-    for f in missing:
+    for f in sorted(missing):
         if debug:
             dbg('debug.copies:        tracing file: %s\n' % f)
         fctx = b[f]
@@ -353,81 +370,6 @@
     return _chain(x, y, _backwardrenames(x, a, match=match),
                   _forwardcopies(a, y, match=match))
 
-def _computenonoverlap(repo, c1, c2, addedinm1, addedinm2, baselabel=''):
-    """Computes, based on addedinm1 and addedinm2, the files exclusive to c1
-    and c2. This is its own function so extensions can easily wrap this call
-    to see what files mergecopies is about to process.
-
-    Even though c1 and c2 are not used in this function, they are useful in
-    other extensions for being able to read the file nodes of the changed files.
-
-    "baselabel" can be passed to help distinguish the multiple computations
-    done in the graft case.
-    """
-    u1 = sorted(addedinm1 - addedinm2)
-    u2 = sorted(addedinm2 - addedinm1)
-
-    header = "  unmatched files in %s"
-    if baselabel:
-        header += ' (from %s)' % baselabel
-    if u1:
-        repo.ui.debug("%s:\n   %s\n" % (header % 'local', "\n   ".join(u1)))
-    if u2:
-        repo.ui.debug("%s:\n   %s\n" % (header % 'other', "\n   ".join(u2)))
-
-    return u1, u2
-
-def _makegetfctx(ctx):
-    """return a 'getfctx' function suitable for _checkcopies usage
-
-    We have to re-setup the function building 'filectx' for each
-    '_checkcopies' to ensure the linkrev adjustment is properly setup for
-    each. Linkrev adjustment is important to avoid bug in rename
-    detection. Moreover, having a proper '_ancestrycontext' setup ensures
-    the performance impact of this adjustment is kept limited. Without it,
-    each file could do a full dag traversal making the time complexity of
-    the operation explode (see issue4537).
-
-    This function exists here mostly to limit the impact on stable. Feel
-    free to refactor on default.
-    """
-    rev = ctx.rev()
-    repo = ctx._repo
-    ac = getattr(ctx, '_ancestrycontext', None)
-    if ac is None:
-        revs = [rev]
-        if rev is None:
-            revs = [p.rev() for p in ctx.parents()]
-        ac = repo.changelog.ancestors(revs, inclusive=True)
-        ctx._ancestrycontext = ac
-    def makectx(f, n):
-        if n in node.wdirfilenodeids:  # in a working context?
-            if ctx.rev() is None:
-                return ctx.filectx(f)
-            return repo[None][f]
-        fctx = repo.filectx(f, fileid=n)
-        # setup only needed for filectx not create from a changectx
-        fctx._ancestrycontext = ac
-        fctx._descendantrev = rev
-        return fctx
-    return util.lrucachefunc(makectx)
-
-def _combinecopies(copyfrom, copyto, finalcopy, diverge, incompletediverge):
-    """combine partial copy paths"""
-    remainder = {}
-    for f in copyfrom:
-        if f in copyto:
-            finalcopy[copyto[f]] = copyfrom[f]
-            del copyto[f]
-    for f in incompletediverge:
-        assert f not in diverge
-        ic = incompletediverge[f]
-        if ic[0] in copyto:
-            diverge[f] = [copyto[ic[0]], ic[1]]
-        else:
-            remainder[f] = ic
-    return remainder
-
 def mergecopies(repo, c1, c2, base):
     """
     Finds moves and copies between context c1 and c2 that are relevant for
@@ -485,7 +427,14 @@
         return _dirstatecopies(repo, narrowmatch), {}, {}, {}, {}
 
     copytracing = repo.ui.config('experimental', 'copytrace')
-    boolctrace = stringutil.parsebool(copytracing)
+    if stringutil.parsebool(copytracing) is False:
+        # stringutil.parsebool() returns None when it is unable to parse the
+        # value, so copy tracing stays enabled unless it is explicitly disabled
+        return {}, {}, {}, {}, {}
+
+    if usechangesetcentricalgo(repo):
+        # The heuristics don't make sense when we need changeset-centric algos
+        return _fullcopytracing(repo, c1, c2, base)
 
     # Copy trace disabling is explicitly below the node == p1 logic above
     # because the logic above is required for a simple copy to be kept across a
@@ -497,10 +446,6 @@
         if _isfullcopytraceable(repo, c1, base):
             return _fullcopytracing(repo, c1, c2, base)
         return _heuristicscopytracing(repo, c1, c2, base)
-    elif boolctrace is False:
-        # stringutil.parsebool() returns None when it is unable to parse the
-        # value, so we should rely on making sure copytracing is on such cases
-        return {}, {}, {}, {}, {}
     else:
         return _fullcopytracing(repo, c1, c2, base)
 
@@ -522,6 +467,23 @@
         return commits < sourcecommitlimit
     return False
 
+def _checksinglesidecopies(src, dsts1, m1, m2, mb, c2, base,
+                           copy, renamedelete):
+    if src not in m2:
+        # deleted on side 2
+        if src not in m1:
+            # renamed on side 1, deleted on side 2
+            renamedelete[src] = dsts1
+    elif m2[src] != mb[src]:
+        if not _related(c2[src], base[src]):
+            return
+        # modified on side 2
+        for dst in dsts1:
+            if dst not in m2:
+                # dst not added on side 2 (handle as regular
+                # "both created" case in manifestmerge otherwise)
+                copy[dst] = src
+
 def _fullcopytracing(repo, c1, c2, base):
     """ The full copytracing algorithm which finds all the new files that were
     added from merge base up to the top commit and for each file it checks if
@@ -530,159 +492,84 @@
     This is pretty slow when a lot of changesets are involved but will track all
     the copies.
     """
-    # In certain scenarios (e.g. graft, update or rebase), base can be
-    # overridden We still need to know a real common ancestor in this case We
-    # can't just compute _c1.ancestor(_c2) and compare it to ca, because there
-    # can be multiple common ancestors, e.g. in case of bidmerge.  Because our
-    # caller may not know if the revision passed in lieu of the CA is a genuine
-    # common ancestor or not without explicitly checking it, it's better to
-    # determine that here.
-    #
-    # base.isancestorof(wc) is False, work around that
-    _c1 = c1.p1() if c1.rev() is None else c1
-    _c2 = c2.p1() if c2.rev() is None else c2
-    # an endpoint is "dirty" if it isn't a descendant of the merge base
-    # if we have a dirty endpoint, we need to trigger graft logic, and also
-    # keep track of which endpoint is dirty
-    dirtyc1 = not base.isancestorof(_c1)
-    dirtyc2 = not base.isancestorof(_c2)
-    graft = dirtyc1 or dirtyc2
-    tca = base
-    if graft:
-        tca = _c1.ancestor(_c2)
-
-    limit = _findlimit(repo, c1, c2)
-    repo.ui.debug("  searching for copies back to rev %d\n" % limit)
-
     m1 = c1.manifest()
     m2 = c2.manifest()
     mb = base.manifest()
 
-    # gather data from _checkcopies:
-    # - diverge = record all diverges in this dict
-    # - copy = record all non-divergent copies in this dict
-    # - fullcopy = record all copies in this dict
-    # - incomplete = record non-divergent partial copies here
-    # - incompletediverge = record divergent partial copies here
-    diverge = {} # divergence data is shared
-    incompletediverge  = {}
-    data1 = {'copy': {},
-             'fullcopy': {},
-             'incomplete': {},
-             'diverge': diverge,
-             'incompletediverge': incompletediverge,
-            }
-    data2 = {'copy': {},
-             'fullcopy': {},
-             'incomplete': {},
-             'diverge': diverge,
-             'incompletediverge': incompletediverge,
-            }
+    copies1 = pathcopies(base, c1)
+    copies2 = pathcopies(base, c2)
+
+    inversecopies1 = {}
+    inversecopies2 = {}
+    for dst, src in copies1.items():
+        inversecopies1.setdefault(src, []).append(dst)
+    for dst, src in copies2.items():
+        inversecopies2.setdefault(src, []).append(dst)
+
+    copy = {}
+    diverge = {}
+    renamedelete = {}
+    allsources = set(inversecopies1) | set(inversecopies2)
+    for src in allsources:
+        dsts1 = inversecopies1.get(src)
+        dsts2 = inversecopies2.get(src)
+        if dsts1 and dsts2:
+            # copied/renamed on both sides
+            if src not in m1 and src not in m2:
+                # renamed on both sides
+                dsts1 = set(dsts1)
+                dsts2 = set(dsts2)
+                # If there's some overlap in the rename destinations, we
+                # consider it not divergent. For example, if side 1 copies 'a'
+                # to 'b' and 'c' and deletes 'a', and side 2 copies 'a' to 'c'
+                # and 'd' and deletes 'a'.
+                if dsts1 & dsts2:
+                    for dst in (dsts1 & dsts2):
+                        copy[dst] = src
+                else:
+                    diverge[src] = sorted(dsts1 | dsts2)
+            elif src in m1 and src in m2:
+                # copied on both sides
+                dsts1 = set(dsts1)
+                dsts2 = set(dsts2)
+                for dst in (dsts1 & dsts2):
+                    copy[dst] = src
+            # TODO: Handle cases where it was renamed on one side and copied
+            # on the other side
+        elif dsts1:
+            # copied/renamed only on side 1
+            _checksinglesidecopies(src, dsts1, m1, m2, mb, c2, base,
+                                   copy, renamedelete)
+        elif dsts2:
+            # copied/renamed only on side 2
+            _checksinglesidecopies(src, dsts2, m2, m1, mb, c1, base,
+                                   copy, renamedelete)
+
+    renamedeleteset = set()
+    divergeset = set()
+    for dsts in diverge.values():
+        divergeset.update(dsts)
+    for dsts in renamedelete.values():
+        renamedeleteset.update(dsts)
 
     # find interesting file sets from manifests
     addedinm1 = m1.filesnotin(mb, repo.narrowmatch())
     addedinm2 = m2.filesnotin(mb, repo.narrowmatch())
-    bothnew = sorted(addedinm1 & addedinm2)
-    if tca == base:
-        # unmatched file from base
-        u1r, u2r = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2)
-        u1u, u2u = u1r, u2r
-    else:
-        # unmatched file from base (DAG rotation in the graft case)
-        u1r, u2r = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2,
-                                      baselabel='base')
-        # unmatched file from topological common ancestors (no DAG rotation)
-        # need to recompute this for directory move handling when grafting
-        mta = tca.manifest()
-        u1u, u2u = _computenonoverlap(repo, c1, c2,
-                                      m1.filesnotin(mta, repo.narrowmatch()),
-                                      m2.filesnotin(mta, repo.narrowmatch()),
-                                      baselabel='topological common ancestor')
-
-    for f in u1u:
-        _checkcopies(c1, c2, f, base, tca, dirtyc1, limit, data1)
-
-    for f in u2u:
-        _checkcopies(c2, c1, f, base, tca, dirtyc2, limit, data2)
-
-    copy = dict(data1['copy'])
-    copy.update(data2['copy'])
-    fullcopy = dict(data1['fullcopy'])
-    fullcopy.update(data2['fullcopy'])
-
-    if dirtyc1:
-        _combinecopies(data2['incomplete'], data1['incomplete'], copy, diverge,
-                       incompletediverge)
-    if dirtyc2:
-        _combinecopies(data1['incomplete'], data2['incomplete'], copy, diverge,
-                       incompletediverge)
-
-    renamedelete = {}
-    renamedeleteset = set()
-    divergeset = set()
-    for of, fl in list(diverge.items()):
-        if len(fl) == 1 or of in c1 or of in c2:
-            del diverge[of] # not actually divergent, or not a rename
-            if of not in c1 and of not in c2:
-                # renamed on one side, deleted on the other side, but filter
-                # out files that have been renamed and then deleted
-                renamedelete[of] = [f for f in fl if f in c1 or f in c2]
-                renamedeleteset.update(fl) # reverse map for below
-        else:
-            divergeset.update(fl) # reverse map for below
+    u1 = sorted(addedinm1 - addedinm2)
+    u2 = sorted(addedinm2 - addedinm1)
 
-    if bothnew:
-        repo.ui.debug("  unmatched files new in both:\n   %s\n"
-                      % "\n   ".join(bothnew))
-    bothdiverge = {}
-    bothincompletediverge = {}
-    remainder = {}
-    both1 = {'copy': {},
-             'fullcopy': {},
-             'incomplete': {},
-             'diverge': bothdiverge,
-             'incompletediverge': bothincompletediverge
-            }
-    both2 = {'copy': {},
-             'fullcopy': {},
-             'incomplete': {},
-             'diverge': bothdiverge,
-             'incompletediverge': bothincompletediverge
-            }
-    for f in bothnew:
-        _checkcopies(c1, c2, f, base, tca, dirtyc1, limit, both1)
-        _checkcopies(c2, c1, f, base, tca, dirtyc2, limit, both2)
-    if dirtyc1 and dirtyc2:
-        remainder = _combinecopies(both2['incomplete'], both1['incomplete'],
-                                   copy, bothdiverge, bothincompletediverge)
-        remainder1 = _combinecopies(both1['incomplete'], both2['incomplete'],
-                                   copy, bothdiverge, bothincompletediverge)
-        remainder.update(remainder1)
-    elif dirtyc1:
-        # incomplete copies may only be found on the "dirty" side for bothnew
-        assert not both2['incomplete']
-        remainder = _combinecopies({}, both1['incomplete'], copy, bothdiverge,
-                                   bothincompletediverge)
-    elif dirtyc2:
-        assert not both1['incomplete']
-        remainder = _combinecopies({}, both2['incomplete'], copy, bothdiverge,
-                                   bothincompletediverge)
-    else:
-        # incomplete copies and divergences can't happen outside grafts
-        assert not both1['incomplete']
-        assert not both2['incomplete']
-        assert not bothincompletediverge
-    for f in remainder:
-        assert f not in bothdiverge
-        ic = remainder[f]
-        if ic[0] in (m1 if dirtyc1 else m2):
-            # backed-out rename on one side, but watch out for deleted files
-            bothdiverge[f] = ic
-    for of, fl in bothdiverge.items():
-        if len(fl) == 2 and fl[0] == fl[1]:
-            copy[fl[0]] = of # not actually divergent, just matching renames
+    header = "  unmatched files in %s"
+    if u1:
+        repo.ui.debug("%s:\n   %s\n" % (header % 'local', "\n   ".join(u1)))
+    if u2:
+        repo.ui.debug("%s:\n   %s\n" % (header % 'other', "\n   ".join(u2)))
 
-    if fullcopy and repo.ui.debugflag:
+    fullcopy = copies1.copy()
+    fullcopy.update(copies2)
+    if not fullcopy:
+        return copy, {}, diverge, renamedelete, {}
+
+    if repo.ui.debugflag:
         repo.ui.debug("  all copies found (* = to merge, ! = divergent, "
                       "% = renamed and deleted):\n")
         for f in sorted(fullcopy):
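
Stripped of the manifest checks and the rename-and-delete handling, the heart of the rewritten ``_fullcopytracing()`` is inverting the two ``pathcopies()`` results and classifying each copy source. A rough standalone sketch (helper name invented) of that classification step::

   def classify_copy_sources(copies1, copies2):
       # copies1/copies2 map destination -> source for each side of the merge
       inverse1, inverse2 = {}, {}
       for dst, src in copies1.items():
           inverse1.setdefault(src, set()).add(dst)
       for dst, src in copies2.items():
           inverse2.setdefault(src, set()).add(dst)

       copy, diverge, oneside = {}, {}, {}
       for src in set(inverse1) | set(inverse2):
           dsts1 = inverse1.get(src, set())
           dsts2 = inverse2.get(src, set())
           if dsts1 and dsts2:
               common = dsts1 & dsts2
               if common:
                   for dst in common:   # overlapping destinations: not divergent
                       copy[dst] = src
               else:
                   diverge[src] = sorted(dsts1 | dsts2)
           else:
               # copied/renamed on one side only; the real code decides between
               # 'copy' and 'renamedelete' by consulting the manifests
               oneside[src] = sorted(dsts1 or dsts2)
       return copy, diverge, oneside

   # side 1 renames a -> b, side 2 renames a -> c: divergent rename
   print(classify_copy_sources({'b': 'a'}, {'c': 'a'}))
   # ({}, {'a': ['b', 'c']}, {})
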
@@ -697,16 +584,10 @@
                                                               note))
     del divergeset
 
-    if not fullcopy:
-        return copy, {}, diverge, renamedelete, {}
-
     repo.ui.debug("  checking for directory renames\n")
 
     # generate a directory move map
     d1, d2 = c1.dirs(), c2.dirs()
-    # Hack for adding '', which is not otherwise added, to d1 and d2
-    d1.addpath('/')
-    d2.addpath('/')
     invalid = set()
     dirmove = {}
 
@@ -746,7 +627,7 @@
 
     movewithdir = {}
     # check unaccounted nonoverlapping files against directory moves
-    for f in u1r + u2r:
+    for f in u1 + u2:
         if f not in fullcopy:
             for d in dirmove:
                 if f.startswith(d):
@@ -893,99 +774,6 @@
     except StopIteration:
         return False
 
-def _checkcopies(srcctx, dstctx, f, base, tca, remotebase, limit, data):
-    """
-    check possible copies of f from msrc to mdst
-
-    srcctx = starting context for f in msrc
-    dstctx = destination context for f in mdst
-    f = the filename to check (as in msrc)
-    base = the changectx used as a merge base
-    tca = topological common ancestor for graft-like scenarios
-    remotebase = True if base is outside tca::srcctx, False otherwise
-    limit = the rev number to not search beyond
-    data = dictionary of dictionary to store copy data. (see mergecopies)
-
-    note: limit is only an optimization, and provides no guarantee that
-    irrelevant revisions will not be visited
-    there is no easy way to make this algorithm stop in a guaranteed way
-    once it "goes behind a certain revision".
-    """
-
-    msrc = srcctx.manifest()
-    mdst = dstctx.manifest()
-    mb = base.manifest()
-    mta = tca.manifest()
-    # Might be true if this call is about finding backward renames,
-    # This happens in the case of grafts because the DAG is then rotated.
-    # If the file exists in both the base and the source, we are not looking
-    # for a rename on the source side, but on the part of the DAG that is
-    # traversed backwards.
-    #
-    # In the case there is both backward and forward renames (before and after
-    # the base) this is more complicated as we must detect a divergence.
-    # We use 'backwards = False' in that case.
-    backwards = not remotebase and base != tca and f in mb
-    getsrcfctx = _makegetfctx(srcctx)
-    getdstfctx = _makegetfctx(dstctx)
-
-    if msrc[f] == mb.get(f) and not remotebase:
-        # Nothing to merge
-        return
-
-    of = None
-    seen = {f}
-    for oc in getsrcfctx(f, msrc[f]).ancestors():
-        of = oc.path()
-        if of in seen:
-            # check limit late - grab last rename before
-            if oc.linkrev() < limit:
-                break
-            continue
-        seen.add(of)
-
-        # remember for dir rename detection
-        if backwards:
-            data['fullcopy'][of] = f # grafting backwards through renames
-        else:
-            data['fullcopy'][f] = of
-        if of not in mdst:
-            continue # no match, keep looking
-        if mdst[of] == mb.get(of):
-            return # no merge needed, quit early
-        c2 = getdstfctx(of, mdst[of])
-        # c2 might be a plain new file on added on destination side that is
-        # unrelated to the droids we are looking for.
-        cr = _related(oc, c2)
-        if cr and (of == f or of == c2.path()): # non-divergent
-            if backwards:
-                data['copy'][of] = f
-            elif of in mb:
-                data['copy'][f] = of
-            elif remotebase: # special case: a <- b <- a -> b "ping-pong" rename
-                data['copy'][of] = f
-                del data['fullcopy'][f]
-                data['fullcopy'][of] = f
-            else: # divergence w.r.t. graft CA on one side of topological CA
-                for sf in seen:
-                    if sf in mb:
-                        assert sf not in data['diverge']
-                        data['diverge'][sf] = [f, of]
-                        break
-            return
-
-    if of in mta:
-        if backwards or remotebase:
-            data['incomplete'][of] = f
-        else:
-            for sf in seen:
-                if sf in mb:
-                    if tca == base:
-                        data['diverge'].setdefault(sf, []).append(f)
-                    else:
-                        data['incompletediverge'][sf] = [of, f]
-                    return
-
 def duplicatecopies(repo, wctx, rev, fromrev, skiprev=None):
     """reproduce copies from fromrev to rev in the dirstate
 
@@ -1005,8 +793,7 @@
         # metadata across the rebase anyway).
         exclude = pathcopies(repo[fromrev], repo[skiprev])
     for dst, src in pathcopies(repo[fromrev], repo[rev]).iteritems():
-        # copies.pathcopies returns backward renames, so dst might not
-        # actually be in the dirstate
         if dst in exclude:
             continue
-        wctx[dst].markcopied(src)
+        if dst in wctx:
+            wctx[dst].markcopied(src)
--- a/mercurial/dirstate.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/dirstate.py	Mon May 20 11:40:47 2019 -0400
@@ -27,6 +27,12 @@
     util,
 )
 
+try:
+    from . import rustext
+    rustext.__name__  # force actual import (see hgdemandimport)
+except ImportError:
+    rustext = None
+
 parsers = policy.importmod(r'parsers')
 
 propertycache = util.propertycache
@@ -656,8 +662,6 @@
         self._dirty = False
 
     def _dirignore(self, f):
-        if f == '.':
-            return False
         if self._ignore(f):
             return True
         for p in util.finddirs(f):
@@ -751,15 +755,16 @@
                 del files[i]
             j += 1
 
-        if not files or '.' in files:
-            files = ['.']
+        if not files or '' in files:
+            files = ['']
+            # constructing the foldmap is expensive, so don't do it for the
+            # common case where files is ['']
+            normalize = None
         results = dict.fromkeys(subrepos)
         results['.hg'] = None
 
         for ff in files:
-            # constructing the foldmap is expensive, so don't do it for the
-            # common case where files is ['.']
-            if normalize and ff != '.':
+            if normalize:
                 nf = normalize(ff, False, True)
             else:
                 nf = ff
@@ -903,9 +908,7 @@
                 if visitentries == 'this' or visitentries == 'all':
                     visitentries = None
                 skip = None
-                if nd == '.':
-                    nd = ''
-                else:
+                if nd != '':
                     skip = '.hg'
                 try:
                     entries = listdir(join(nd), stat=True, skip=skip)
@@ -1465,7 +1468,12 @@
         # parsing the dirstate.
         #
         # (we cannot decorate the function directly since it is in a C module)
-        parse_dirstate = util.nogc(parsers.parse_dirstate)
+        if rustext is not None:
+            parse_dirstate = rustext.dirstate.parse_dirstate
+        else:
+            parse_dirstate = parsers.parse_dirstate
+
+        parse_dirstate = util.nogc(parse_dirstate)
         p = parse_dirstate(self._map, self.copymap, st)
         if not self._dirtyparents:
             self.setparents(*p)
@@ -1476,7 +1484,12 @@
         self.get = self._map.get
 
     def write(self, st, now):
-        st.write(parsers.pack_dirstate(self._map, self.copymap,
+        if rustext is not None:
+            pack_dirstate = rustext.dirstate.pack_dirstate
+        else:
+            pack_dirstate = parsers.pack_dirstate
+
+        st.write(pack_dirstate(self._map, self.copymap,
                                        self.parents(), now))
         st.close()
         self._dirtyparents = False
--- a/mercurial/discovery.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/discovery.py	Mon May 20 11:40:47 2019 -0400
@@ -343,10 +343,19 @@
     # 1. Check for new branches on the remote.
     if newbranches and not newbranch:  # new branch requires --new-branch
         branchnames = ', '.join(sorted(newbranches))
-        raise error.Abort(_("push creates new remote branches: %s!")
-                           % branchnames,
-                         hint=_("use 'hg push --new-branch' to create"
-                                " new remote branches"))
+        # Calculate how many of the new branches are closed branches
+        closedbranches = set()
+        for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
+            if isclosed:
+                closedbranches.add(tag)
+        closedbranches = (closedbranches & set(newbranches))
+        if closedbranches:
+            errmsg = (_("push creates new remote branches: %s (%d closed)!")
+                        % (branchnames, len(closedbranches)))
+        else:
+            errmsg = (_("push creates new remote branches: %s!") % branchnames)
+        hint = _("use 'hg push --new-branch' to create new remote branches")
+        raise error.Abort(errmsg, hint=hint)
 
     # 2. Find heads that we need not warn about
     nowarnheads = _nowarnheads(pushop)
--- a/mercurial/exchange.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/exchange.py	Mon May 20 11:40:47 2019 -0400
@@ -539,10 +539,12 @@
     # get lock as we might write phase data
     wlock = lock = None
     try:
-        # bundle2 push may receive a reply bundle touching bookmarks or other
-        # things requiring the wlock. Take it now to ensure proper ordering.
+        # bundle2 push may receive a reply bundle touching bookmarks
+        # requiring the wlock. Take it now to ensure proper ordering.
         maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
-        if (not _forcebundle1(pushop)) and maypushback:
+        if ((not _forcebundle1(pushop)) and
+            maypushback and
+            not bookmod.bookmarksinstore(repo)):
             wlock = pushop.repo.wlock()
         lock = pushop.repo.lock()
         pushop.trmanager = transactionmanager(pushop.repo,
@@ -1548,7 +1550,10 @@
             raise error.Abort(msg)
 
     pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
-    with repo.wlock(), repo.lock(), pullop.trmanager:
+    wlock = util.nullcontextmanager()
+    if not bookmod.bookmarksinstore(repo):
+        wlock = repo.wlock()
+    with wlock, repo.lock(), pullop.trmanager:
         # Use the modern wire protocol, if available.
         if remote.capable('command-changesetdata'):
             exchangev2.pull(pullop)
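
The push and pull paths above now skip the working-copy lock when bookmarks live in the store, substituting a no-op context manager so the ``with`` statement keeps its shape. A standalone sketch of the pattern using only the standard library (all names invented for illustration)::

   from contextlib import nullcontext
   import threading

   store_lock = threading.Lock()
   working_copy_lock = threading.Lock()

   def pull(bookmarks_in_store):
       # Only take the working-copy lock when bookmarks still live outside the
       # store; otherwise a no-op context manager stands in for it.
       wlock = nullcontext() if bookmarks_in_store else working_copy_lock
       with wlock, store_lock:
           print('pulling, wlock held: %s' % (not bookmarks_in_store))

   pull(bookmarks_in_store=True)   # pulling, wlock held: False
   pull(bookmarks_in_store=False)  # pulling, wlock held: True
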
@@ -2395,7 +2400,8 @@
             try:
                 def gettransaction():
                     if not lockandtr[2]:
-                        lockandtr[0] = repo.wlock()
+                        if not bookmod.bookmarksinstore(repo):
+                            lockandtr[0] = repo.wlock()
                         lockandtr[1] = repo.lock()
                         lockandtr[2] = repo.transaction(source)
                         lockandtr[2].hookargs['source'] = source
--- a/mercurial/extensions.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/extensions.py	Mon May 20 11:40:47 2019 -0400
@@ -221,14 +221,7 @@
     extsetup = getattr(_extensions[name], 'extsetup', None)
     if extsetup:
         try:
-            try:
-                extsetup(ui)
-            except TypeError:
-                if pycompat.getargspec(extsetup).args:
-                    raise
-                ui.deprecwarn("extsetup for '%s' must take a ui argument"
-                              % name, "4.9")
-                extsetup() # old extsetup with no ui argument
+            extsetup(ui)
         except Exception as inst:
             ui.traceback(force=True)
             msg = stringutil.forcebytestr(inst)
--- a/mercurial/exthelper.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/exthelper.py	Mon May 20 11:40:47 2019 -0400
@@ -15,9 +15,12 @@
     commands,
     error,
     extensions,
+    pycompat,
     registrar,
 )
 
+from hgdemandimport import tracing
+
 class exthelper(object):
     """Helper for modular extension setup
 
@@ -135,7 +138,8 @@
         for cont, funcname, wrapper in self._functionwrappers:
             extensions.wrapfunction(cont, funcname, wrapper)
         for c in self._uicallables:
-            c(ui)
+            with tracing.log(b'finaluisetup: %s', pycompat.sysbytes(repr(c))):
+                c(ui)
 
     def finaluipopulate(self, ui):
         """Method to be used as the extension uipopulate
@@ -175,7 +179,8 @@
                     entry[1].append(opt)
 
         for c in self._extcallables:
-            c(ui)
+            with tracing.log(b'finalextsetup: %s', pycompat.sysbytes(repr(c))):
+                c(ui)
 
     def finalreposetup(self, ui, repo):
         """Method to be used as the extension reposetup
@@ -187,7 +192,8 @@
         - Changes to repo.__class__, repo.dirstate.__class__
         """
         for c in self._repocallables:
-            c(ui, repo)
+            with tracing.log(b'finalreposetup: %s', pycompat.sysbytes(repr(c))):
+                c(ui, repo)
 
     def uisetup(self, call):
         """Decorated function will be executed during uisetup
--- a/mercurial/graphmod.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/graphmod.py	Mon May 20 11:40:47 2019 -0400
@@ -469,22 +469,6 @@
     while len(text) < len(lines):
         text.append("")
 
-    if any(len(char) > 1 for char in edgemap.values()):
-        # limit drawing an edge to the first or last N lines of the current
-        # section the rest of the edge is drawn like a parent line.
-        parent = state['styles'][PARENT][-1:]
-        def _drawgp(char, i):
-            # should a grandparent character be drawn for this line?
-            if len(char) < 2:
-                return True
-            num = int(char[:-1])
-            # either skip first num lines or take last num lines, based on sign
-            return -num <= i if num < 0 else (len(lines) - i) <= num
-        for i, line in enumerate(lines):
-            line[:] = [c[-1:] if _drawgp(c, i) else parent for c in line]
-        edgemap.update(
-            (e, (c if len(c) < 2 else parent)) for e, c in edgemap.items())
-
     # print lines
     indentation_level = max(ncols, ncols + coldiff)
     lines = ["%-*s " % (2 * indentation_level, "".join(line)) for line in lines]
--- a/mercurial/hbisect.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/hbisect.py	Mon May 20 11:40:47 2019 -0400
@@ -32,6 +32,7 @@
     if searching for a first bad one.
     """
 
+    repo = repo.unfiltered()
     changelog = repo.changelog
     clparents = changelog.parentrevs
     skip = {changelog.rev(n) for n in state['skip']}
@@ -139,7 +140,7 @@
     state = {'current': [], 'good': [], 'bad': [], 'skip': []}
     for l in repo.vfs.tryreadlines("bisect.state"):
         kind, node = l[:-1].split()
-        node = repo.lookup(node)
+        node = repo.unfiltered().lookup(node)
         if kind not in state:
             raise error.Abort(_("unknown bisect kind %s") % kind)
         state[kind].append(node)
@@ -184,7 +185,7 @@
     """
     state = load_state(repo)
     if status in ('good', 'bad', 'skip', 'current'):
-        return map(repo.changelog.rev, state[status])
+        return map(repo.unfiltered().changelog.rev, state[status])
     else:
         # In the following sets, we do *not* call 'bisect()' with more
         # than one level of recursion, because that can be very, very
@@ -268,6 +269,7 @@
     return None
 
 def printresult(ui, repo, state, displayer, nodes, good):
+    repo = repo.unfiltered()
     if len(nodes) == 1:
         # narrowed it down to a single revision
         if good:
--- a/mercurial/help/config.txt	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/help/config.txt	Mon May 20 11:40:47 2019 -0400
@@ -438,6 +438,10 @@
 ``commands``
 ------------
 
+``commit.post-status``
+    Show status of files in the working directory after a successful commit.
+    (default: False)
+
 ``resolve.confirm``
     Confirm before performing action if no filename is passed.
     (default: False)
@@ -866,6 +870,7 @@
     Repositories with this on-disk format require Mercurial version 4.7
 
     Enabled by default.
+
 ``revlog-compression``
     Compression algorithm used by revlog. Supported values are `zlib` and `zstd`.
     The `zlib` engine is the historical default of Mercurial. `zstd` is a newer
@@ -874,6 +879,15 @@
 
     On some systems, the Mercurial installation may lack `zstd` support. Default is `zlib`.
 
+``bookmarks-in-store``
+    Store bookmarks in .hg/store/. This means that bookmarks are shared when
+    using `hg share` regardless of the `-B` option.
+
+    Repositories with this on-disk format require Mercurial version 5.1.
+
+    Disabled by default.
+
+
 ``graph``
 ---------
 
--- a/mercurial/hg.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/hg.py	Mon May 20 11:40:47 2019 -0400
@@ -1092,9 +1092,9 @@
     recurse()
     return 0 # exit code is zero since we found outgoing changes
 
-def verify(repo):
+def verify(repo, level=None):
     """verify the consistency of a repository"""
-    ret = verifymod.verify(repo)
+    ret = verifymod.verify(repo, level=level)
 
     # Broken subrepo references in hidden csets don't seem worth worrying about,
     # since they can't be pushed/pulled, and --hidden can be used if they are a
--- a/mercurial/hgweb/__init__.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/hgweb/__init__.py	Mon May 20 11:40:47 2019 -0400
@@ -38,6 +38,9 @@
     - list of virtual:real tuples (multi-repo view)
     '''
 
+    if isinstance(config, pycompat.unicode):
+        raise error.ProgrammingError(
+            'Mercurial only supports encoded strings: %r' % config)
     if ((isinstance(config, bytes) and not os.path.isdir(config)) or
         isinstance(config, dict) or isinstance(config, list)):
         # create a multi-dir interface
--- a/mercurial/hgweb/hgwebdir_mod.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/hgweb/hgwebdir_mod.py	Mon May 20 11:40:47 2019 -0400
@@ -414,14 +414,10 @@
                     return self.makeindex(req, res, tmpl, subdir)
 
             def _virtualdirs():
-                # Check the full virtual path, each parent, and the root ('')
-                if virtual != '':
-                    yield virtual
-
-                    for p in util.finddirs(virtual):
-                        yield p
-
-                yield ''
+                # Check the full virtual path, and each parent
+                yield virtual
+                for p in util.finddirs(virtual):
+                    yield p
 
             for virtualrepo in _virtualdirs():
                 real = repos.get(virtualrepo)
--- a/mercurial/hgweb/webutil.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/hgweb/webutil.py	Mon May 20 11:40:47 2019 -0400
@@ -409,12 +409,6 @@
 
 whyunstable._requires = {'repo', 'ctx'}
 
-# helper to mark a function as a new-style template keyword; can be removed
-# once old-style function gets unsupported and new-style becomes the default
-def _kwfunc(f):
-    f._requires = ()
-    return f
-
 def commonentry(repo, ctx):
     node = scmutil.binnode(ctx)
     return {
@@ -439,8 +433,8 @@
         'branches': nodebranchdict(repo, ctx),
         'tags': nodetagsdict(repo, node),
         'bookmarks': nodebookmarksdict(repo, node),
-        'parent': _kwfunc(lambda context, mapping: parents(ctx)),
-        'child': _kwfunc(lambda context, mapping: children(ctx)),
+        'parent': lambda context, mapping: parents(ctx),
+        'child': lambda context, mapping: children(ctx),
     }
 
 def changelistentry(web, ctx):
@@ -457,9 +451,9 @@
 
     entry = commonentry(repo, ctx)
     entry.update({
-        'allparents': _kwfunc(lambda context, mapping: parents(ctx)),
-        'parent': _kwfunc(lambda context, mapping: parents(ctx, rev - 1)),
-        'child': _kwfunc(lambda context, mapping: children(ctx, rev + 1)),
+        'allparents': lambda context, mapping: parents(ctx),
+        'parent': lambda context, mapping: parents(ctx, rev - 1),
+        'child': lambda context, mapping: children(ctx, rev + 1),
         'changelogtag': showtags,
         'files': files,
     })
@@ -529,7 +523,7 @@
         changesetbranch=showbranch,
         files=templateutil.mappedgenerator(_listfilesgen,
                                            args=(ctx, web.stripecount)),
-        diffsummary=_kwfunc(lambda context, mapping: diffsummary(diffstatsgen)),
+        diffsummary=lambda context, mapping: diffsummary(diffstatsgen),
         diffstat=diffstats,
         archives=web.archivelist(ctx.hex()),
         **pycompat.strkwargs(commonentry(web.repo, ctx)))
--- a/mercurial/httppeer.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/httppeer.py	Mon May 20 11:40:47 2019 -0400
@@ -382,6 +382,7 @@
         self._path = path
         self._url = url
         self._caps = caps
+        self.limitedarguments = caps is not None and 'httppostargs' not in caps
         self._urlopener = opener
         self._requestbuilder = requestbuilder
 
@@ -750,6 +751,9 @@
 
 @interfaceutil.implementer(repository.ipeerv2)
 class httpv2peer(object):
+
+    limitedarguments = False
+
     def __init__(self, ui, repourl, apipath, opener, requestbuilder,
                  apidescriptor):
         self.ui = ui
--- a/mercurial/localrepo.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/localrepo.py	Mon May 20 11:40:47 2019 -0400
@@ -910,6 +910,7 @@
         'treemanifest',
         REVLOGV2_REQUIREMENT,
         SPARSEREVLOG_REQUIREMENT,
+        bookmarks.BOOKMARKS_IN_STORE_REQUIREMENT,
     }
     _basesupported = supportedformats | {
         'store',
@@ -1069,6 +1070,8 @@
         # Signature to cached matcher instance.
         self._sparsematchercache = {}
 
+        self._extrafilterid = repoview.extrafilter(ui)
+
     def _getvfsward(self, origfunc):
         """build a ward for self.vfs"""
         rref = weakref.ref(self)
@@ -1216,11 +1219,14 @@
 
         In other word, there is always only one level of `repoview` "filtering".
         """
+        if self._extrafilterid is not None and '%' not in name:
+            name = name + '%'  + self._extrafilterid
+
         cls = repoview.newtype(self.unfiltered().__class__)
         return cls(self, name, visibilityexceptions)
 
     @mixedrepostorecache(('bookmarks', ''), ('bookmarks.current', ''),
-                         ('00changelog.i', 'store'))
+                         ('bookmarks', 'store'), ('00changelog.i', 'store'))
     def _bookmarks(self):
         return bookmarks.bmstore(self)
 
@@ -1977,7 +1983,7 @@
                 (self.vfs, 'journal.dirstate'),
                 (self.vfs, 'journal.branch'),
                 (self.vfs, 'journal.desc'),
-                (self.vfs, 'journal.bookmarks'),
+                (bookmarks.bookmarksvfs(self), 'journal.bookmarks'),
                 (self.svfs, 'journal.phaseroots'))
 
     def undofiles(self):
@@ -1992,8 +1998,9 @@
                           encoding.fromlocal(self.dirstate.branch()))
         self.vfs.write("journal.desc",
                           "%d\n%s\n" % (len(self), desc))
-        self.vfs.write("journal.bookmarks",
-                          self.vfs.tryread("bookmarks"))
+        bookmarksvfs = bookmarks.bookmarksvfs(self)
+        bookmarksvfs.write("journal.bookmarks",
+                           bookmarksvfs.tryread("bookmarks"))
         self.svfs.write("journal.phaseroots",
                            self.svfs.tryread("phaseroots"))
 
@@ -2063,8 +2070,9 @@
         vfsmap = {'plain': self.vfs, '': self.svfs}
         transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn,
                              checkambigfiles=_cachedfiles)
-        if self.vfs.exists('undo.bookmarks'):
-            self.vfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
+        bookmarksvfs = bookmarks.bookmarksvfs(self)
+        if bookmarksvfs.exists('undo.bookmarks'):
+            bookmarksvfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
         if self.svfs.exists('undo.phaseroots'):
             self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True)
         self.invalidate()
@@ -2147,6 +2155,8 @@
             for ctx in self['.'].parents():
                 ctx.manifest()  # accessing the manifest is enough
 
+            # accessing fnode cache warms the cache
+            tagsmod.fnoderevs(self.ui, unfi, unfi.changelog.revs())
             # accessing tags warm the cache
             self.tags()
             self.filtered('served').tags()
@@ -2357,7 +2367,10 @@
             node = fctx.filenode()
             if node in [fparent1, fparent2]:
                 self.ui.debug('reusing %s filelog entry\n' % fname)
-                if manifest1.flags(fname) != fctx.flags():
+                if ((fparent1 != nullid and
+                     manifest1.flags(fname) != fctx.flags()) or
+                    (fparent2 != nullid and
+                     manifest2.flags(fname) != fctx.flags())):
                     changelist.append(fname)
                 return node
 
@@ -2551,12 +2564,12 @@
                         _('note: commit message saved in %s\n') % msgfn)
                 raise
 
-        def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
+        def commithook():
             # hack for command that use a temporary commit (eg: histedit)
             # temporary commit got stripped before hook release
             if self.changelog.hasnode(ret):
-                self.hook("commit", node=node, parent1=parent1,
-                          parent2=parent2)
+                self.hook("commit", node=hex(ret), parent1=hookp1,
+                          parent2=hookp2)
         self._afterlock(commithook)
         return ret
 
@@ -2628,8 +2641,8 @@
                         raise
 
                 # update manifest
-                removed = [f for f in sorted(removed) if f in m1 or f in m2]
-                drop = [f for f in removed if f in m]
+                removed = [f for f in removed if f in m1 or f in m2]
+                drop = sorted([f for f in removed if f in m])
                 for f in drop:
                     del m[f]
                 files = changed + removed
@@ -2655,7 +2668,7 @@
                                     p1.manifestnode(), p2.manifestnode(),
                                     added, drop, match=self.narrowmatch())
                 else:
-                    self.ui.debug('reusing manifest form p1 (listed files '
+                    self.ui.debug('reusing manifest from p1 (listed files '
                                   'actually unchanged)\n')
                     mn = p1.manifestnode()
             else:
@@ -2663,6 +2676,14 @@
                 mn = p1.manifestnode()
                 files = []
 
+            if writecopiesto == 'changeset-only':
+                # If writing only to changeset extras, use None to indicate that
+                # no entry should be written. If writing to both, write an empty
+                # entry to prevent the reader from falling back to reading
+                # filelogs.
+                p1copies = p1copies or None
+                p2copies = p2copies or None
+
             # update changelog
             self.ui.note(_("committing changelog\n"))
             self.changelog.delayupdate(tr)
@@ -3008,6 +3029,9 @@
     if createopts.get('lfs'):
         requirements.add('lfs')
 
+    if ui.configbool('format', 'bookmarks-in-store'):
+        requirements.add(bookmarks.BOOKMARKS_IN_STORE_REQUIREMENT)
+
     return requirements
 
 def filterknowncreateopts(ui, createopts):
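
The bookmark hunks above route every bookmark file access through bookmarks.bookmarksvfs() and gate the new layout on a format.bookmarks-in-store requirement. A minimal sketch of the vfs selection those calls imply (the requirement string and the helper body are assumptions, not the actual bookmarks module):

    # Hedged sketch: the exact requirement value is assumed; only the
    # selection pattern mirrors the calls in the hunks above.
    BOOKMARKS_IN_STORE_REQUIREMENT = 'bookmarksinstore'

    def bookmarksvfs(repo):
        # Repositories created with format.bookmarks-in-store keep bookmark
        # files in the store vfs; older repositories keep them in .hg/ (vfs).
        if BOOKMARKS_IN_STORE_REQUIREMENT in repo.requirements:
            return repo.svfs
        return repo.vfs
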
--- a/mercurial/logcmdutil.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/logcmdutil.py	Mon May 20 11:40:47 2019 -0400
@@ -743,10 +743,15 @@
             return match
 
     expr = _makerevset(repo, match, pats, slowpath, opts)
-    if opts.get('graph') and opts.get('rev'):
+    if opts.get('graph'):
         # User-specified revs might be unsorted, but don't sort before
         # _makerevset because it might depend on the order of revs
-        if not (revs.isdescending() or revs.istopo()):
+        if repo.ui.configbool('experimental', 'log.topo'):
+            if not revs.istopo():
+                revs = dagop.toposort(revs, repo.changelog.parentrevs)
+                # TODO: try to iterate the set lazily
+                revs = revset.baseset(list(revs), istopo=True)
+        elif not (revs.isdescending() or revs.istopo()):
             revs.sort(reverse=True)
     if expr:
         matcher = revset.match(None, expr)
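
Pulled out of the hunk above, the experimental.log.topo handling is a conditional re-sort of the revset before graph rendering. A hedged stand-alone sketch (the imports are real Mercurial modules; the wrapper function is illustrative only):

    from mercurial import dagop, revset

    def _maybetoposort(repo, revs):
        # Mirrors the new branch above: only re-sort when the set is not
        # already topologically sorted, and mark the result with istopo=True
        # so later consumers do not sort it again.
        if repo.ui.configbool('experimental', 'log.topo') and not revs.istopo():
            revs = dagop.toposort(revs, repo.changelog.parentrevs)
            revs = revset.baseset(list(revs), istopo=True)
        return revs
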
--- a/mercurial/manifest.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/manifest.py	Mon May 20 11:40:47 2019 -0400
@@ -466,9 +466,9 @@
             if match(fn):
                 yield fn
 
-        # for dirstate.walk, files=['.'] means "walk the whole tree".
+        # for dirstate.walk, files=[''] means "walk the whole tree".
         # follow that here, too
-        fset.discard('.')
+        fset.discard('')
 
         for fn in sorted(fset):
             if not self.hasdir(fn):
@@ -1038,9 +1038,9 @@
                 fset.remove(fn)
             yield fn
 
-        # for dirstate.walk, files=['.'] means "walk the whole tree".
+        # for dirstate.walk, files=[''] means "walk the whole tree".
         # follow that here, too
-        fset.discard('.')
+        fset.discard('')
 
         for fn in sorted(fset):
             if not self.hasdir(fn):
@@ -1048,7 +1048,7 @@
 
     def _walk(self, match):
         '''Recursively generates matching file names for walk().'''
-        visit = match.visitchildrenset(self._dir[:-1] or '.')
+        visit = match.visitchildrenset(self._dir[:-1])
         if not visit:
             return
 
@@ -1076,7 +1076,7 @@
         '''recursively generate a new manifest filtered by the match argument.
         '''
 
-        visit = match.visitchildrenset(self._dir[:-1] or '.')
+        visit = match.visitchildrenset(self._dir[:-1])
         if visit == 'all':
             return self.copy()
         ret = treemanifest(self._dir)
@@ -1235,7 +1235,7 @@
             return m._dirs.get(d, emptytree)._node
 
         # let's skip investigating things that `match` says we do not need.
-        visit = match.visitchildrenset(self._dir[:-1] or '.')
+        visit = match.visitchildrenset(self._dir[:-1])
         visit = self._loadchildrensetlazy(visit)
         if visit == 'this' or visit == 'all':
             visit = None
@@ -1254,7 +1254,7 @@
 
         If `matcher` is provided, it only returns subtrees that match.
         """
-        if matcher and not matcher.visitdir(self._dir[:-1] or '.'):
+        if matcher and not matcher.visitdir(self._dir[:-1]):
             return
         if not matcher or matcher(self._dir[:-1]):
             yield self
@@ -1486,8 +1486,8 @@
 
             _checkforbidden(added)
             # combine the changed lists into one sorted iterator
-            work = heapq.merge([(x, False) for x in added],
-                               [(x, True) for x in removed])
+            work = heapq.merge([(x, False) for x in sorted(added)],
+                               [(x, True) for x in sorted(removed)])
 
             arraytext, deltatext = m.fastdelta(self.fulltextcache[p1], work)
             cachedelta = self._revlog.rev(p1), deltatext
@@ -1685,7 +1685,7 @@
             return self._dirmancache[tree][node]
 
         if not self._narrowmatch.always():
-            if not self._narrowmatch.visitdir(tree[:-1] or '.'):
+            if not self._narrowmatch.visitdir(tree[:-1]):
                 return excludeddirmanifestctx(tree, node)
         if tree:
             if self._rootstore._treeondisk:
@@ -1878,7 +1878,7 @@
     def _storage(self):
         narrowmatch = self._manifestlog._narrowmatch
         if not narrowmatch.always():
-            if not narrowmatch.visitdir(self._dir[:-1] or '.'):
+            if not narrowmatch.visitdir(self._dir[:-1]):
                 return excludedmanifestrevlog(self._dir)
         return self._manifestlog.getstorage(self._dir)
 
--- a/mercurial/match.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/match.py	Mon May 20 11:40:47 2019 -0400
@@ -24,6 +24,12 @@
     stringutil,
 )
 
+try:
+    from . import rustext
+    rustext.__name__  # force actual import (see hgdemandimport)
+except ImportError:
+    rustext = None
+
 allpatternkinds = ('re', 'glob', 'path', 'relglob', 'relpath', 'relre',
                    'rootglob',
                    'listfile', 'listfile0', 'set', 'include', 'subinclude',
@@ -305,9 +311,6 @@
 
     def __call__(self, fn):
         return self.matchfn(fn)
-    def __iter__(self):
-        for f in self._files:
-            yield f
     # Callbacks related to how the matcher is used by dirstate.walk.
     # Subscribers to these events must monkeypatch the matcher object.
     def bad(self, f, msg):
@@ -377,7 +380,7 @@
           the following values (assuming the implementation of visitchildrenset
           is capable of recognizing this; some implementations are not).
 
-          '.' -> {'foo', 'qux'}
+          '' -> {'foo', 'qux'}
           'baz' -> set()
           'foo' -> {'bar'}
           # Ideally this would be 'all', but since the prefix nature of matchers
@@ -480,11 +483,19 @@
              or pycompat.byterepr(self.matchfn))
         return '<predicatenmatcher pred=%s>' % s
 
+def normalizerootdir(dir, funcname):
+    if dir == '.':
+        util.nouideprecwarn("match.%s() no longer accepts "
+                            "'.', use '' instead." % funcname, '5.1')
+        return ''
+    return dir
+
+
 class patternmatcher(basematcher):
     """Matches a set of (kind, pat, source) against a 'root' directory.
 
     >>> kindpats = [
-    ...     (b're', b'.*\.c$', b''),
+    ...     (b're', br'.*\.c$', b''),
     ...     (b'path', b'foo/a', b''),
     ...     (b'relpath', b'b', b''),
     ...     (b'glob', b'*.h', b''),
@@ -504,7 +515,7 @@
     True
 
     >>> m.files()
-    ['.', 'foo/a', 'b', '.']
+    ['', 'foo/a', 'b', '']
     >>> m.exact(b'foo/a')
     True
     >>> m.exact(b'b')
@@ -522,13 +533,13 @@
 
     @propertycache
     def _dirs(self):
-        return set(util.dirs(self._fileset)) | {'.'}
+        return set(util.dirs(self._fileset))
 
     def visitdir(self, dir):
+        dir = normalizerootdir(dir, 'visitdir')
         if self._prefix and dir in self._fileset:
             return 'all'
-        return ('.' in self._fileset or
-                dir in self._fileset or
+        return (dir in self._fileset or
                 dir in self._dirs or
                 any(parentdir in self._fileset
                     for parentdir in util.finddirs(dir)))
@@ -561,7 +572,7 @@
             addpath(f)
 
     def addpath(self, path):
-        if path == '.':
+        if path == '':
             return
         dirs = self._dirs
         findsplitdirs = _dirchildren._findsplitdirs
@@ -577,14 +588,14 @@
         #  - produces a (dirname, basename) tuple, not just 'dirname'
         #  - includes root dir
         # Unlike manifest._splittopdir, this does not suffix `dirname` with a
-        # slash, and produces '.' for the root instead of ''.
+        # slash.
         oldpos = len(path)
         pos = path.rfind('/')
         while pos != -1:
             yield path[:pos], path[pos + 1:oldpos]
             oldpos = pos
             pos = path.rfind('/', 0, pos)
-        yield '.', path[:oldpos]
+        yield '', path[:oldpos]
 
     def get(self, path):
         return self._dirs.get(path, set())
@@ -606,10 +617,10 @@
         self._parents = set(parents)
 
     def visitdir(self, dir):
+        dir = normalizerootdir(dir, 'visitdir')
         if self._prefix and dir in self._roots:
             return 'all'
-        return ('.' in self._roots or
-                dir in self._roots or
+        return (dir in self._roots or
                 dir in self._dirs or
                 dir in self._parents or
                 any(parentdir in self._roots
@@ -632,7 +643,7 @@
             return 'all'
         # Note: this does *not* include the 'dir in self._parents' case from
         # visitdir, that's handled below.
-        if ('.' in self._roots or
+        if ('' in self._roots or
             dir in self._roots or
             dir in self._dirs or
             any(parentdir in self._roots
@@ -651,7 +662,7 @@
     r'''Matches the input files exactly. They are interpreted as paths, not
     patterns (so no kind-prefixes).
 
-    >>> m = exactmatcher([b'a.txt', b're:.*\.c$'])
+    >>> m = exactmatcher([b'a.txt', br're:.*\.c$'])
     >>> m(b'a.txt')
     True
     >>> m(b'b.txt')
@@ -664,7 +675,7 @@
     So pattern 're:.*\.c$' is not considered as a regex, but as a file name
     >>> m(b'main.c')
     False
-    >>> m(b're:.*\.c$')
+    >>> m(br're:.*\.c$')
     True
     '''
 
@@ -680,22 +691,25 @@
 
     @propertycache
     def _dirs(self):
-        return set(util.dirs(self._fileset)) | {'.'}
+        return set(util.dirs(self._fileset))
 
     def visitdir(self, dir):
+        dir = normalizerootdir(dir, 'visitdir')
         return dir in self._dirs
 
     def visitchildrenset(self, dir):
+        dir = normalizerootdir(dir, 'visitchildrenset')
+
         if not self._fileset or dir not in self._dirs:
             return set()
 
-        candidates = self._fileset | self._dirs - {'.'}
-        if dir != '.':
+        candidates = self._fileset | self._dirs - {''}
+        if dir != '':
             d = dir + '/'
             candidates = set(c[len(d):] for c in candidates if
                              c.startswith(d))
         # self._dirs includes all of the directories, recursively, so if
-        # we're attempting to match foo/bar/baz.txt, it'll have '.', 'foo',
+        # we're attempting to match foo/bar/baz.txt, it'll have '', 'foo',
         # 'foo/bar' in it. Thus we can safely ignore a candidate that has a
         # '/' in it, indicating a it's for a subdir-of-a-subdir; the
         # immediate subdir will be in there without a slash.
@@ -769,7 +783,7 @@
         # Possible values for m1:         set(...), set()
         # Possible values for m2: 'this', set(...)
         # We ignore m2's set results. They're possibly incorrect:
-        #  m1 = path:dir/subdir, m2=rootfilesin:dir, visitchildrenset('.'):
+        #  m1 = path:dir/subdir, m2=rootfilesin:dir, visitchildrenset(''):
         #    m1 returns {'dir'}, m2 returns {'dir'}, if we subtracted we'd
         #    return set(), which is *not* correct, we still need to visit 'dir'!
         return m1_set
@@ -915,14 +929,16 @@
         return self._matcher.matchfn(self._path + "/" + f)
 
     def visitdir(self, dir):
-        if dir == '.':
+        dir = normalizerootdir(dir, 'visitdir')
+        if dir == '':
             dir = self._path
         else:
             dir = self._path + "/" + dir
         return self._matcher.visitdir(dir)
 
     def visitchildrenset(self, dir):
-        if dir == '.':
+        dir = normalizerootdir(dir, 'visitchildrenset')
+        if dir == '':
             dir = self._path
         else:
             dir = self._path + "/" + dir
@@ -991,18 +1007,18 @@
 
     @propertycache
     def _pathdirs(self):
-        return set(util.finddirs(self._path)) | {'.'}
+        return set(util.finddirs(self._path))
 
     def visitdir(self, dir):
         if dir == self._path:
-            return self._matcher.visitdir('.')
+            return self._matcher.visitdir('')
         if dir.startswith(self._pathprefix):
             return self._matcher.visitdir(dir[len(self._pathprefix):])
         return dir in self._pathdirs
 
     def visitchildrenset(self, dir):
         if dir == self._path:
-            return self._matcher.visitchildrenset('.')
+            return self._matcher.visitchildrenset('')
         if dir.startswith(self._pathprefix):
             return self._matcher.visitchildrenset(dir[len(self._pathprefix):])
         if dir in self._pathdirs:
@@ -1075,7 +1091,7 @@
 def patkind(pattern, default=None):
     '''If pattern is 'kind:pat' with a known kind, return kind.
 
-    >>> patkind(b're:.*\.c$')
+    >>> patkind(br're:.*\.c$')
     're'
     >>> patkind(b'glob:*.c')
     'glob'
@@ -1178,9 +1194,23 @@
     return res
 
 def _regex(kind, pat, globsuffix):
-    '''Convert a (normalized) pattern of any kind into a regular expression.
+    '''Convert a (normalized) pattern of any kind into a
+    regular expression.
     globsuffix is appended to the regexp of globs.'''
-    if not pat:
+
+    if rustext is not None:
+        try:
+            return rustext.filepatterns.build_single_regex(
+                kind,
+                pat,
+                globsuffix
+            )
+        except rustext.filepatterns.PatternError:
+            raise error.ProgrammingError(
+                'not a regex pattern: %s:%s' % (kind, pat)
+            )
+
+    if not pat and kind in ('glob', 'relpath'):
         return ''
     if kind == 're':
         return pat
@@ -1324,13 +1354,17 @@
                 if '[' in p or '{' in p or '*' in p or '?' in p:
                     break
                 root.append(p)
-            r.append('/'.join(root) or '.')
+            r.append('/'.join(root))
         elif kind in ('relpath', 'path'):
-            r.append(pat or '.')
+            if pat == '.':
+                pat = ''
+            r.append(pat)
         elif kind in ('rootfilesin',):
-            d.append(pat or '.')
+            if pat == '.':
+                pat = ''
+            d.append(pat)
         else: # relglob, re, relre
-            r.append('.')
+            r.append('')
     return r, d
 
 def _roots(kindpats):
@@ -1350,18 +1384,18 @@
     >>> _rootsdirsandparents(
     ...     [(b'glob', b'g/h/*', b''), (b'glob', b'g/h', b''),
     ...      (b'glob', b'g*', b'')])
-    (['g/h', 'g/h', '.'], [], ['g', '.'])
+    (['g/h', 'g/h', ''], [], ['', 'g'])
     >>> _rootsdirsandparents(
     ...     [(b'rootfilesin', b'g/h', b''), (b'rootfilesin', b'', b'')])
-    ([], ['g/h', '.'], ['g', '.'])
+    ([], ['g/h', ''], ['', 'g'])
     >>> _rootsdirsandparents(
     ...     [(b'relpath', b'r', b''), (b'path', b'p/p', b''),
     ...      (b'path', b'', b'')])
-    (['r', 'p/p', '.'], [], ['p', '.'])
+    (['r', 'p/p', ''], [], ['', 'p'])
     >>> _rootsdirsandparents(
     ...     [(b'relglob', b'rg*', b''), (b're', b're/', b''),
     ...      (b'relre', b'rr', b'')])
-    (['.', '.', '.'], [], ['.'])
+    (['', '', ''], [], [''])
     '''
     r, d = _patternrootsanddirs(kindpats)
 
@@ -1370,8 +1404,6 @@
     # scanned to get to either the roots or the other exact directories.
     p.extend(util.dirs(d))
     p.extend(util.dirs(r))
-    # util.dirs() does not include the root directory, so add it manually
-    p.append('.')
 
     # FIXME: all uses of this function convert these to sets, do so before
     # returning.
@@ -1421,9 +1453,24 @@
     pattern        # pattern of the current default type
 
     if sourceinfo is set, returns a list of tuples:
-    (pattern, lineno, originalline). This is useful to debug ignore patterns.
+    (pattern, lineno, originalline).
+    This is useful to debug ignore patterns.
     '''
 
+    if rustext is not None:
+        result, warnings = rustext.filepatterns.read_pattern_file(
+            filepath,
+            bool(warn),
+            sourceinfo,
+        )
+
+        for warning_params in warnings:
+            # Can't be easily emitted from Rust, because it would require
+            # a mechanism for both gettext and calling the `warn` function.
+            warn(_("%s: ignoring invalid syntax '%s'\n") % warning_params)
+
+        return result
+
     syntaxes = {
         're': 'relre:',
         'regexp': 'relre:',
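
The visible effect of the '.'-to-'' root-directory change is easiest to see in the exactmatcher doctest style already used in this file. A hedged usage sketch (the expected values follow the comments and doctests in the hunks above):

    from mercurial import match as matchmod

    m = matchmod.exactmatcher([b'foo/bar/baz.txt'])
    # Under the new convention the repository root is the empty string:
    assert m.visitdir(b'')                        # the root must be visited
    assert m.visitchildrenset(b'') == {b'foo'}    # only 'foo' sits at the root
    # Passing the old '.' spelling is translated to '' by normalizerootdir()
    # and emits a deprecation warning.
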
--- a/mercurial/merge.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/merge.py	Mon May 20 11:40:47 2019 -0400
@@ -1380,7 +1380,6 @@
         # Pick the best bid for each file
         repo.ui.note(_('\nauction for merging merge bids\n'))
         actions = {}
-        dms = [] # filenames that have dm actions
         for f, bids in sorted(fbids.items()):
             # bids is a mapping from action method to list af actions
             # Consensus?
@@ -1389,8 +1388,6 @@
                 if all(a == l[0] for a in l[1:]): # len(bids) is > 1
                     repo.ui.note(_(" %s: consensus for %s\n") % (f, m))
                     actions[f] = l[0]
-                    if m == ACTION_DIR_RENAME_MOVE_LOCAL:
-                        dms.append(f)
                     continue
             # If keep is an option, just do it.
             if ACTION_KEEP in bids:
@@ -1415,18 +1412,7 @@
             repo.ui.warn(_(' %s: ambiguous merge - picked %s action\n') %
                          (f, m))
             actions[f] = l[0]
-            if m == ACTION_DIR_RENAME_MOVE_LOCAL:
-                dms.append(f)
             continue
-        # Work around 'dm' that can cause multiple actions for the same file
-        for f in dms:
-            dm, (f0, flags), msg = actions[f]
-            assert dm == ACTION_DIR_RENAME_MOVE_LOCAL, dm
-            if f0 in actions and actions[f0][0] == ACTION_REMOVE:
-                # We have one bid for removing a file and another for moving it.
-                # These two could be merged as first move and then delete ...
-                # but instead drop moving and just delete.
-                del actions[f]
         repo.ui.note(_('end of auction\n\n'))
 
     if wctx.rev() is None:
--- a/mercurial/minirst.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/minirst.py	Mon May 20 11:40:47 2019 -0400
@@ -44,6 +44,9 @@
 def subsubsubsection(s):
     return "%s\n%s\n\n" % (s, "." * encoding.colwidth(s))
 
+def subsubsubsubsection(s):
+    return "%s\n%s\n\n" % (s, "'" * encoding.colwidth(s))
+
 def replace(text, substs):
     '''
     Apply a list of (find, replace) pairs to a text.
--- a/mercurial/narrowspec.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/narrowspec.py	Mon May 20 11:40:47 2019 -0400
@@ -15,6 +15,7 @@
     match as matchmod,
     merge,
     repository,
+    scmutil,
     sparse,
     util,
 )
@@ -292,8 +293,8 @@
     removedmatch = matchmod.differencematcher(oldmatch, newmatch)
 
     ds = repo.dirstate
-    lookup, status = ds.status(removedmatch, subrepos=[], ignored=False,
-                               clean=True, unknown=False)
+    lookup, status = ds.status(removedmatch, subrepos=[], ignored=True,
+                               clean=True, unknown=True)
     trackeddirty = status.modified + status.added
     clean = status.clean
     if assumeclean:
@@ -302,8 +303,13 @@
     else:
         trackeddirty.extend(lookup)
     _deletecleanfiles(repo, clean)
+    uipathfn = scmutil.getuipathfn(repo)
     for f in sorted(trackeddirty):
-        repo.ui.status(_('not deleting possibly dirty file %s\n') % f)
+        repo.ui.status(_('not deleting possibly dirty file %s\n') % uipathfn(f))
+    for f in sorted(status.unknown):
+        repo.ui.status(_('not deleting unknown file %s\n') % uipathfn(f))
+    for f in sorted(status.ignored):
+        repo.ui.status(_('not deleting ignored file %s\n') % uipathfn(f))
     for f in clean + trackeddirty:
         ds.drop(f)
 
--- a/mercurial/obsolete.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/obsolete.py	Mon May 20 11:40:47 2019 -0400
@@ -93,10 +93,6 @@
 _calcsize = struct.calcsize
 propertycache = util.propertycache
 
-# the obsolete feature is not mature enough to be enabled by default.
-# you have to rely on third party extension extension to enable this.
-_enabled = False
-
 # Options for obsolescence
 createmarkersopt = 'createmarkers'
 allowunstableopt = 'allowunstable'
@@ -124,11 +120,6 @@
         if 'all' in result:
             return True
 
-        # For migration purposes, temporarily return true if the config hasn't
-        # been set but _enabled is true.
-        if len(result) == 0 and _enabled:
-            return True
-
         # Temporary hack for next check
         newconfig = repo.ui.config('experimental', 'evolution.createmarkers')
         if newconfig:
--- a/mercurial/policy.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/policy.py	Mon May 20 11:40:47 2019 -0400
@@ -69,7 +69,7 @@
     (r'cext', r'bdiff'): 3,
     (r'cext', r'mpatch'): 1,
     (r'cext', r'osutil'): 4,
-    (r'cext', r'parsers'): 12,
+    (r'cext', r'parsers'): 13,
 }
 
 # map import request to other package or module
--- a/mercurial/registrar.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/registrar.py	Mon May 20 11:40:47 2019 -0400
@@ -338,19 +338,10 @@
             '''
             pass
 
-        # old API (DEPRECATED)
-        @templatekeyword('mykeyword')
-        def mykeywordfunc(repo, ctx, templ, cache, revcache, **args):
-            '''Explanation of this template keyword ....
-            '''
-            pass
-
     The first string argument is used also in online help.
 
     Optional argument 'requires' should be a collection of resource names
-    which the template keyword depends on. This also serves as a flag to
-    switch to the new API. If 'requires' is unspecified, all template
-    keywords and resources are expanded to the function arguments.
+    which the template keyword depends on.
 
     'templatekeyword' instance in example above can be used to
     decorate multiple functions.
@@ -362,7 +353,7 @@
     Otherwise, explicit 'templatekw.loadkeyword()' is needed.
     """
 
-    def _extrasetup(self, name, func, requires=None):
+    def _extrasetup(self, name, func, requires=()):
         func._requires = requires
 
 class templatefilter(_templateregistrarbase):
--- a/mercurial/repair.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/repair.py	Mon May 20 11:40:47 2019 -0400
@@ -279,7 +279,9 @@
         if rev in tostrip:
             updatebm.append(m)
     newbmtarget = None
-    if updatebm: # don't compute anything is there is no bookmark to move anyway
+    # If we need to move bookmarks, compute bookmark
+    # targets. Otherwise we can skip doing this logic.
+    if updatebm:
         # For a set s, max(parents(s) - s) is the same as max(heads(::s - s)),
         # but is much faster
         newbmtarget = repo.revs('max(parents(%ld) - (%ld))', tostrip, tostrip)
@@ -364,8 +366,9 @@
     striptrees(repo, tr, striprev, files)
 
 def striptrees(repo, tr, striprev, files):
-    if 'treemanifest' in repo.requirements: # safe but unnecessary
-                                            # otherwise
+    if 'treemanifest' in repo.requirements:
+        # This logic is safe if treemanifest isn't enabled, but also
+        # pointless, so we skip it if treemanifest isn't enabled.
         for unencoded, encoded, size in repo.store.datafiles():
             if (unencoded.startswith('meta/') and
                 unencoded.endswith('00manifest.i')):
@@ -416,7 +419,9 @@
 
         progress.complete()
 
-        if 'treemanifest' in repo.requirements: # safe but unnecessary otherwise
+        if 'treemanifest' in repo.requirements:
+            # This logic is safe if treemanifest isn't enabled, but also
+            # pointless, so we skip it if treemanifest isn't enabled.
             for dir in util.dirs(seenfiles):
                 i = 'meta/%s/00manifest.i' % dir
                 d = 'meta/%s/00manifest.d' % dir
--- a/mercurial/repository.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/repository.py	Mon May 20 11:40:47 2019 -0400
@@ -291,6 +291,10 @@
 class ipeerrequests(interfaceutil.Interface):
     """Interface for executing commands on a peer."""
 
+    limitedarguments = interfaceutil.Attribute(
+        """True if the peer cannot receive large argument value for commands."""
+    )
+
     def commandexecutor():
         """A context manager that resolves to an ipeercommandexecutor.
 
@@ -329,6 +333,8 @@
 class peer(object):
     """Base class for peer repositories."""
 
+    limitedarguments = False
+
     def capable(self, name):
         caps = self.capabilities()
         if name in caps:
--- a/mercurial/repoview.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/repoview.py	Mon May 20 11:40:47 2019 -0400
@@ -17,6 +17,10 @@
     phases,
     pycompat,
     tags as tagsmod,
+    util,
+)
+from .utils import (
+    repoviewutil,
 )
 
 def hideablerevs(repo):
@@ -154,6 +158,35 @@
                'immutable':  computemutable,
                'base':  computeimpactable}
 
+_basefiltername = list(filtertable)
+
+def extrafilter(ui):
+    """initialize extra filter and return its id
+
+    If extra filtering is configured, we make sure the associated filtered
+    views are declared and return the associated id.
+    """
+    frevs = ui.config('experimental', 'extra-filter-revs')
+    if frevs is None:
+        return None
+
+    fid = pycompat.sysbytes(util.DIGESTS['sha1'](frevs).hexdigest())[:12]
+
+    combine = lambda fname: fname + '%' + fid
+
+    subsettable = repoviewutil.subsettable
+
+    if combine('base') not in filtertable:
+        for name in _basefiltername:
+            def extrafilteredrevs(repo, *args, **kwargs):
+                baserevs = filtertable[name](repo, *args, **kwargs)
+                extrarevs = frozenset(repo.revs(frevs))
+                return baserevs | extrarevs
+            filtertable[combine(name)] = extrafilteredrevs
+            if name in subsettable:
+                subsettable[combine(name)] = combine(subsettable[name])
+    return fid
+
 def filterrevs(repo, filtername, visibilityexceptions=None):
     """returns set of filtered revision for this filter name
 
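
The new extrafilter() derives one additional repoview per existing filter by appending a short hash of the configured revset to each filter name. A hedged sketch of just the naming scheme (this uses hashlib directly; the real code goes through util.DIGESTS as shown above):

    import hashlib

    def extrafilterid(frevs):
        # frevs is the raw bytes of experimental.extra-filter-revs; the id is
        # the first 12 hex digits of its sha1, matching the hunk above.
        return hashlib.sha1(frevs).hexdigest()[:12]

    # e.g. with extra-filter-revs set, the 'visible' view becomes 'visible%<id>':
    fid = extrafilterid(b'not public()')
    name = 'visible' + '%' + fid
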
--- a/mercurial/revset.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/revset.py	Mon May 20 11:40:47 2019 -0400
@@ -52,6 +52,9 @@
 spanset = smartset.spanset
 fullreposet = smartset.fullreposet
 
+# revisions not included in all(), but populated if specified
+_virtualrevs = (node.nullrev, node.wdirrev)
+
 # Constants for ordering requirement, used in getset():
 #
 # If 'define', any nested functions and operations MAY change the ordering of
@@ -120,8 +123,7 @@
     if not x:
         raise error.ParseError(_("empty string is not a valid revision"))
     x = scmutil.intrev(scmutil.revsymbol(repo, x))
-    if (x in subset
-        or x == node.nullrev and isinstance(subset, fullreposet)):
+    if x in subset or x in _virtualrevs and isinstance(subset, fullreposet):
         return baseset([x])
     return baseset()
 
@@ -1847,7 +1849,7 @@
     except (TypeError, ValueError):
         # i18n: "rev" is a keyword
         raise error.ParseError(_("rev expects a number"))
-    if l not in repo.changelog and l not in (node.nullrev, node.wdirrev):
+    if l not in repo.changelog and l not in _virtualrevs:
         return baseset()
     return subset & baseset([l])
 
@@ -2262,7 +2264,7 @@
             if r in seen:
                 continue
             if (r in subset
-                or r == node.nullrev and isinstance(subset, fullreposet)):
+                or r in _virtualrevs and isinstance(subset, fullreposet)):
                 ls.append(r)
             seen.add(r)
     return baseset(ls)
--- a/mercurial/setdiscovery.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/setdiscovery.py	Mon May 20 11:40:47 2019 -0400
@@ -119,13 +119,13 @@
         self._childrenmap = None
 
     def addcommons(self, commons):
-        """registrer nodes known as common"""
+        """register nodes known as common"""
         self._common.addbases(commons)
         if self._undecided is not None:
             self._common.removeancestorsfrom(self._undecided)
 
     def addmissings(self, missings):
-        """registrer some nodes as missing"""
+        """register some nodes as missing"""
         newmissing = self._repo.revs('%ld::%ld', missings, self.undecided)
         if newmissing:
             self.missing.update(newmissing)
@@ -275,9 +275,63 @@
     # early exit if we know all the specified remote heads already
     ui.debug("query 1; heads\n")
     roundtrips += 1
-    sample = _limitsample(ownheads, initialsamplesize)
-    # indices between sample and externalized version must match
-    sample = list(sample)
+    # We also ask the remote about all the local heads. That set can be
+    # arbitrarily large, so we used to limit its size to
+    # `initialsamplesize`. We no longer do so, as it proved
+    # counterproductive. The skipped heads could lead to a large "undecided"
+    # set, slower to clarify than if we had asked the question for all heads
+    # right away.
+    #
+    # We are already fetching all server heads using the `heads` command, so
+    # sending an equivalent number of heads the other way should not have a
+    # significant impact. In addition, it is very likely that we will have
+    # to issue "known" requests for an equivalent number of revisions in
+    # order to decide whether these heads are common or missing.
+    #
+    # A detailed analysis follows.
+    #
+    # Case A: local and server both have few heads
+    #
+    #     Ownheads is below initialsamplesize, limit would not have any effect.
+    #
+    # Case B: local has few heads and server has many
+    #
+    #     Ownheads is below initialsamplesize, limit would not have any effect.
+    #
+    # Case C: local and server both have many heads
+    #
+    #     We now transfer some more data, but not significantly more than is
+    #     already transferred to carry the server heads.
+    #
+    # Case D: local has many heads, server has few
+    #
+    #   D.1 local heads are mostly known remotely
+    #
+    #     All the known heads will have been part of a `known` request at
+    #     some point for the discovery to finish. Sending them all earlier
+    #     actually helps.
+    #
+    #     (This case is fairly unlikely; it requires the numerous local heads
+    #     to all have been merged server-side into only a few heads.)
+    #
+    #   D.2 local heads are mostly missing remotely
+    #
+    #     To determine that the heads are missing, we will have to issue
+    #     `known` requests for them or for one of their ancestors. The number
+    #     of such requests will likely be of the same order of magnitude as
+    #     the number of local heads.
+    #
+    #     The only case where `known` requests on ancestors are more
+    #     efficient is when all the "missing" local heads are based on a few
+    #     changesets that are themselves "missing".  This means we would have
+    #     a "complex" graph (with many heads) attached to, but largely
+    #     independent of, the "simple" graph on the server. This is a fairly
+    #     unusual case and has not been met in the wild so far.
+    if remote.limitedarguments:
+        sample = _limitsample(ownheads, initialsamplesize)
+        # indices between sample and externalized version must match
+        sample = list(sample)
+    else:
+        sample = ownheads
 
     with remote.commandexecutor() as e:
         fheads = e.callcommand('heads', {})
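
Stripped of the wire-protocol plumbing, the sampling decision above reduces to: send every local head unless the peer advertises an argument-size limit. A hedged sketch (initialsamplesize's value and _limitsample's body are assumptions matching the module's existing helpers):

    import random

    initialsamplesize = 100  # assumed value of the module constant

    def _limitsample(sample, desiredlen):
        # Keep at most desiredlen elements, chosen at random.
        if len(sample) <= desiredlen:
            return sample
        return set(random.sample(list(sample), desiredlen))

    def queryoneheads(ownheads, remote):
        # Peers with limited argument size (e.g. plain HTTP without
        # httppostargs) still get a capped sample; everyone else gets all
        # local heads, per the reasoning in the comment block above.
        if remote.limitedarguments:
            return list(_limitsample(ownheads, initialsamplesize))
        return ownheads
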
--- a/mercurial/sslutil.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/sslutil.py	Mon May 20 11:40:47 2019 -0400
@@ -16,6 +16,7 @@
 
 from .i18n import _
 from . import (
+    encoding,
     error,
     node,
     pycompat,
@@ -348,6 +349,17 @@
     if not serverhostname:
         raise error.Abort(_('serverhostname argument is required'))
 
+    if b'SSLKEYLOGFILE' in encoding.environ:
+        try:
+            import sslkeylog
+            sslkeylog.set_keylog(pycompat.fsdecode(
+                encoding.environ[b'SSLKEYLOGFILE']))
+            ui.warn(
+                b'sslkeylog enabled by SSLKEYLOGFILE environment variable\n')
+        except ImportError:
+            ui.warn(b'sslkeylog module missing, '
+                    b'but SSLKEYLOGFILE set in environment\n')
+
     for f in (keyfile, certfile):
         if f and not os.path.exists(f):
             raise error.Abort(
--- a/mercurial/statichttprepo.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/statichttprepo.py	Mon May 20 11:40:47 2019 -0400
@@ -155,6 +155,7 @@
 
         self.names = namespaces.namespaces()
         self.filtername = None
+        self._extrafilterid = None
 
         try:
             requirements = set(self.vfs.read(b'requires').splitlines())
--- a/mercurial/store.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/store.py	Mon May 20 11:40:47 2019 -0400
@@ -40,7 +40,7 @@
     if path.startswith('data/'):
         return matcher(path[len('data/'):-len('.i')])
     elif path.startswith('meta/'):
-        return matcher.visitdir(path[len('meta/'):-len('/00manifest.i')] or '.')
+        return matcher.visitdir(path[len('meta/'):-len('/00manifest.i')])
 
     raise error.ProgrammingError("cannot decode path %s" % path)
 
@@ -337,7 +337,7 @@
         mode = None
     return mode
 
-_data = ('narrowspec data meta 00manifest.d 00manifest.i'
+_data = ('bookmarks narrowspec data meta 00manifest.d 00manifest.i'
          ' 00changelog.d 00changelog.i phaseroots obsstore')
 
 def isrevlog(f, kind, st):
@@ -612,7 +612,7 @@
                     raise
 
     def copylist(self):
-        d = ('narrowspec data meta dh fncache phaseroots obsstore'
+        d = ('bookmarks narrowspec data meta dh fncache phaseroots obsstore'
              ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
         return (['requires', '00changelog.i'] +
                 ['store/' + f for f in d.split()])
--- a/mercurial/tags.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/tags.py	Mon May 20 11:40:47 2019 -0400
@@ -18,6 +18,7 @@
     bin,
     hex,
     nullid,
+    nullrev,
     short,
 )
 from .i18n import _
@@ -89,7 +90,7 @@
     unfi = repo.unfiltered()
     tonode = unfi.changelog.node
     nodes = [tonode(r) for r in revs]
-    fnodes = _getfnodes(ui, repo, nodes[::-1]) # reversed help the cache
+    fnodes = _getfnodes(ui, repo, nodes)
     fnodes = _filterfnodes(fnodes, nodes)
     return fnodes
 
@@ -457,7 +458,8 @@
     # This is the most expensive part of finding tags, so performance
     # depends primarily on the size of newheads.  Worst case: no cache
     # file, so newheads == repoheads.
-    cachefnode = _getfnodes(ui, repo, repoheads)
+    # Reversed order helps the cache ('repoheads' is in descending order)
+    cachefnode = _getfnodes(ui, repo, reversed(repoheads))
 
     # Caller has to iterate over all heads, but can use the filenodes in
     # cachefnode to get to each .hgtags revision quickly.
@@ -472,7 +474,7 @@
     starttime = util.timer()
     fnodescache = hgtagsfnodescache(repo.unfiltered())
     cachefnode = {}
-    for node in reversed(nodes):
+    for node in nodes:
         fnode = fnodescache.getfnode(node)
         if fnode != nullid:
             cachefnode[node] = fnode
@@ -691,6 +693,9 @@
         If an .hgtags does not exist at the specified revision, nullid is
         returned.
         """
+        if node == nullid:
+            return nullid
+
         ctx = self._repo[node]
         rev = ctx.rev()
 
@@ -715,12 +720,33 @@
         if not computemissing:
             return None
 
-        # Populate missing entry.
-        try:
-            fnode = ctx.filenode('.hgtags')
-        except error.LookupError:
-            # No .hgtags file on this revision.
-            fnode = nullid
+        fnode = None
+        cl = self._repo.changelog
+        p1rev, p2rev = cl._uncheckedparentrevs(rev)
+        p1node = cl.node(p1rev)
+        p1fnode = self.getfnode(p1node, computemissing=False)
+        if p2rev != nullrev:
+            # There are some non-merge changesets where p1 is null and p2 is
+            # set. Processing them as merges is just slower, but still gives
+            # a good result.
+            p2node = cl.node(p2rev)
+            p2fnode = self.getfnode(p2node, computemissing=False)
+            if p1fnode != p2fnode:
+                # we cannot rely on readfast because we don't know against what
+                # parent the readfast delta is computed
+                p1fnode = None
+        if p1fnode is not None:
+            mctx = ctx.manifestctx()
+            fnode = mctx.readfast().get('.hgtags')
+            if fnode is None:
+                fnode = p1fnode
+        if fnode is None:
+            # Populate missing entry.
+            try:
+                fnode = ctx.filenode('.hgtags')
+            except error.LookupError:
+                # No .hgtags file on this revision.
+                fnode = nullid
 
         self._writeentry(offset, properprefix, fnode)
         return fnode
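
The getfnode() change above avoids the expensive ctx.filenode('.hgtags') lookup whenever the filenode can be inherited from a parent. A hedged restatement of that decision as a free function (the name and simplifications are mine; only the order of checks follows the hunk):

    def hgtagsfnodefromparents(ctx, p1fnode, p2fnode, ismerge):
        """Return a reusable .hgtags filenode, or None to force the slow path."""
        if ismerge and p1fnode != p2fnode:
            # Diverging parents: we do not know which parent the fast
            # manifest delta was computed against, so give up on reuse.
            return None
        if p1fnode is None:
            return None
        # readfast() only contains entries changed relative to a parent; if
        # .hgtags is absent there, the parent's filenode is still valid.
        fnode = ctx.manifestctx().readfast().get('.hgtags')
        return fnode if fnode is not None else p1fnode
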
--- a/mercurial/templateutil.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/templateutil.py	Mon May 20 11:40:47 2019 -0400
@@ -874,7 +874,6 @@
 def _recursivesymbolblocker(key):
     def showrecursion(context, mapping):
         raise error.Abort(_("recursive reference '%s' in template") % key)
-    showrecursion._requires = ()  # mark as new-style templatekw
     return showrecursion
 
 def runsymbol(context, mapping, key, default=''):
@@ -888,19 +887,6 @@
             v = context.process(key, safemapping)
         except TemplateNotFound:
             v = default
-    if callable(v) and getattr(v, '_requires', None) is None:
-        # old templatekw: expand all keywords and resources
-        # (TODO: drop support for old-style functions. 'f._requires = ()'
-        #  can be removed.)
-        props = {k: context._resources.lookup(mapping, k)
-                 for k in context._resources.knownkeys()}
-        # pass context to _showcompatlist() through templatekw._showlist()
-        props['templ'] = context
-        props.update(mapping)
-        ui = props.get('ui')
-        if ui:
-            ui.deprecwarn("old-style template keyword '%s'" % key, '4.8')
-        return v(**pycompat.strkwargs(props))
     if callable(v):
         # new templatekw
         try:
--- a/mercurial/unionrepo.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/unionrepo.py	Mon May 20 11:40:47 2019 -0400
@@ -128,9 +128,10 @@
 
     def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
         raise NotImplementedError
-    def addgroup(self, deltas, transaction, addrevisioncb=None):
+    def addgroup(self, deltas, linkmapper, transaction, addrevisioncb=None,
+                 maybemissingparents=False):
         raise NotImplementedError
-    def strip(self, rev, minlink):
+    def strip(self, minlink, transaction):
         raise NotImplementedError
     def checksize(self):
         raise NotImplementedError
--- a/mercurial/util.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/util.py	Mon May 20 11:40:47 2019 -0400
@@ -3209,6 +3209,7 @@
     while pos != -1:
         yield path[:pos]
         pos = path.rfind('/', 0, pos)
+    yield ''
 
 
 # convenient shortcut
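
With the added yield '', finddirs() now reports the repository root (as the empty string) as the last parent of every path, in line with the relnotes entry later in this changeset. A small self-contained illustration:

    def finddirs(path):
        # Same shape as the patched util.finddirs: yield each parent
        # directory, innermost first, ending with '' for the root.
        pos = path.rfind('/')
        while pos != -1:
            yield path[:pos]
            pos = path.rfind('/', 0, pos)
        yield ''

    assert list(finddirs('a/b/c')) == ['a/b', 'a', '']
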
--- a/mercurial/verify.py	Mon May 20 10:08:28 2019 +0200
+++ b/mercurial/verify.py	Mon May 20 11:40:47 2019 -0400
@@ -22,9 +22,13 @@
     util,
 )
 
-def verify(repo):
+VERIFY_DEFAULT = 0
+VERIFY_FULL = 1
+
+def verify(repo, level=None):
     with repo.lock():
-        return verifier(repo).verify()
+        v = verifier(repo, level)
+        return v.verify()
 
 def _normpath(f):
     # under hg < 2.4, convert didn't sanitize paths properly, so a
@@ -34,10 +38,13 @@
     return f
 
 class verifier(object):
-    def __init__(self, repo):
+    def __init__(self, repo, level=None):
         self.repo = repo.unfiltered()
         self.ui = repo.ui
         self.match = repo.narrowmatch()
+        if level is None:
+            level = VERIFY_DEFAULT
+        self._level = level
         self.badrevs = set()
         self.errors = 0
         self.warnings = 0
@@ -330,6 +337,16 @@
                         filenodes.setdefault(fullpath, {}).setdefault(fn, lr)
             except Exception as inst:
                 self._exc(lr, _("reading delta %s") % short(n), inst, label)
+            if self._level >= VERIFY_FULL:
+                try:
+                    # Various issues can affect manifests, so we read each
+                    # full text from storage. This triggers the checks from
+                    # the core code (eg: hash verification, filenames are
+                    # ordered, etc.).
+                    mfdelta = mfl.get(dir, n).read()
+                except Exception as inst:
+                    self._exc(lr, _("reading full manifest %s") % short(n),
+                              inst, label)
+
         if not dir:
             progress.complete()
 
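
Callers opt into the deeper manifest check by passing the new level constants through hg.verify(). A hedged usage sketch (the command-level wrapper is an assumption; only verify(repo, level=...) and the constants come from this changeset):

    from mercurial import hg, verify as verifymod

    def verifyrepo(ui, repo, full=False):
        # VERIFY_FULL additionally reads every manifest fulltext, triggering
        # the core hash/ordering checks; VERIFY_DEFAULT keeps prior behaviour.
        level = verifymod.VERIFY_FULL if full else verifymod.VERIFY_DEFAULT
        return hg.verify(repo, level=level)
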
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/relnotes/next	Mon May 20 11:40:47 2019 -0400
@@ -0,0 +1,37 @@
+== New Features ==
+
+ * New config `commands.commit.post-status` shows status after successful
+   commit.
+
+
+== New Experimental Features ==
+
+ * New config `experimental.log.topo` makes `hg log -G` use
+   topological sorting. This is especially useful for aliases since it
+   lets the alias accept an `-r` option while still using topological
+   sorting with or without the `-r` (unlike if you use the `sort(...,
+   topo)` revset).
+
+
+== Bug Fixes  ==
+
+
+== Backwards Compatibility Changes ==
+
+ * Removed (experimental) support for log graph lines mixing
+   parent/grandparent styles. Setting
+   e.g. `experimental.graphstyle.parent = !` and
+   `experimental.graphstyle.grandparent = 3.` would use `!` for the
+   first three lines of the graph and then `.`. This is no longer
+   supported.
+
+
+== Internal API Changes ==
+
+ * Matchers are no longer iterable. Use `match.files()` instead.
+
+ * `match.visitdir()` and `match.visitchildrenset()` now expect the
+   empty string instead of '.' to indicate the root directory.
+
+ * `util.dirs()` and `util.finddirs()` now include an entry for the
+   root directory (empty string).
--- a/rust/Cargo.lock	Mon May 20 10:08:28 2019 +0200
+++ b/rust/Cargo.lock	Mon May 20 11:40:47 2019 -0400
@@ -3,7 +3,7 @@
 version = "0.6.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "memchr 2.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -17,8 +17,8 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
-name = "cfg-if"
-version = "0.1.6"
+name = "byteorder"
+version = "1.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -49,8 +49,12 @@
 name = "hg-core"
 version = "0.1.0"
 dependencies = [
+ "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand_pcg 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -74,7 +78,7 @@
 
 [[package]]
 name = "lazy_static"
-version = "1.2.0"
+version = "1.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -84,13 +88,8 @@
 
 [[package]]
 name = "memchr"
-version = "2.1.2"
+version = "2.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)",
- "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
-]
 
 [[package]]
 name = "num-traits"
@@ -225,7 +224,7 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "aho-corasick 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "memchr 2.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex-syntax 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "utf8-ranges 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -265,7 +264,7 @@
 version = "0.3.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -279,11 +278,6 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
-name = "version_check"
-version = "0.1.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
 name = "winapi"
 version = "0.3.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -306,13 +300,13 @@
 "checksum aho-corasick 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)" = "1e9a933f4e58658d7b12defcf96dc5c720f20832deebe3e0a19efd3b6aaeeb9e"
 "checksum autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a6d640bee2da49f60a4068a7fae53acde8982514ab7bae8b8cea9e88cbcfd799"
 "checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12"
-"checksum cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "082bb9b28e00d3c9d39cc03e64ce4cea0f1bb9b3fde493f0cbc008472d22bdf4"
+"checksum byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a019b10a2a7cdeb292db131fc8113e57ea2a908f6e7894b0c3c671893b65dbeb"
 "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
 "checksum cpython 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b489034e723e7f5109fecd19b719e664f89ef925be785885252469e9822fa940"
 "checksum fuchsia-cprng 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "81f7f8eb465745ea9b02e2704612a9946a59fa40572086c6fd49d6ddcf30bf31"
-"checksum lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a374c89b9db55895453a74c1e38861d9deec0b01b405a82516e9d5de4820dea1"
+"checksum lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14"
 "checksum libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)" = "2d2857ec59fadc0773853c664d2d18e7198e83883e7060b63c924cb077bd5c74"
-"checksum memchr 2.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "db4c41318937f6e76648f42826b1d9ade5c09cafb5aef7e351240a70f39206e9"
+"checksum memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2efc7bc57c883d4a4d6e3246905283d8dae951bb3bd32f49d6ef297f546e1c39"
 "checksum num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0b3a5d7cc97d6d30d8b9bc8fa19bf45349ffe46241e8816f50f62f6d6aaabee1"
 "checksum python27-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "56114c37d4dca82526d74009df7782a28c871ac9d36b19d4cb9e67672258527e"
 "checksum python3-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "61e4aac43f833fd637e429506cb2ac9d7df672c4b68f2eaaa163649b7fdc0444"
@@ -335,7 +329,6 @@
 "checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b"
 "checksum ucd-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "535c204ee4d8434478593480b8f86ab45ec9aae0e83c568ca81abf0fd0e88f86"
 "checksum utf8-ranges 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "796f7e48bef87609f7ade7e06495a87d5cd06c7866e6a5cbfceffc558a243737"
-"checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd"
 "checksum winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "92c1eb33641e276cfa214a0522acad57be5c56b10cb348b3c5117db75f3ac4b0"
 "checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
 "checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
--- a/rust/hg-core/Cargo.toml	Mon May 20 10:08:28 2019 +0200
+++ b/rust/hg-core/Cargo.toml	Mon May 20 11:40:47 2019 -0400
@@ -10,3 +10,9 @@
 [dev-dependencies]
 rand = "*"
 rand_pcg = "*"
+
+[dependencies]
+byteorder = "1.3.1"
+lazy_static = "1.3.0"
+memchr = "2.2.0"
+regex = "^1.1"
--- a/rust/hg-core/src/ancestors.rs	Mon May 20 10:08:28 2019 +0200
+++ b/rust/hg-core/src/ancestors.rs	Mon May 20 11:40:47 2019 -0400
@@ -8,9 +8,9 @@
 //! Rust versions of generic DAG ancestors algorithms for Mercurial
 
 use super::{Graph, GraphError, Revision, NULL_REVISION};
+use crate::dagops;
 use std::cmp::max;
 use std::collections::{BinaryHeap, HashSet};
-use crate::dagops;
 
 /// Iterator over the ancestors of a given list of revisions
 /// This is a generic type, defined and implemented for any Graph, so that
--- a/rust/hg-core/src/dagops.rs	Mon May 20 10:08:28 2019 +0200
+++ b/rust/hg-core/src/dagops.rs	Mon May 20 11:40:47 2019 -0400
@@ -13,7 +13,8 @@
 //! - Similarly *relative roots* of a collection of `Revision`, we mean
 //!   those whose parents, if any, don't belong to the collection.
 use super::{Graph, GraphError, Revision, NULL_REVISION};
-use std::collections::HashSet;
+use crate::ancestors::AncestorsIterator;
+use std::collections::{BTreeSet, HashSet};
 
 fn remove_parents(
     graph: &impl Graph,
@@ -80,6 +81,92 @@
     Ok(())
 }
 
+/// Roots of `revs`, passed as a `HashSet`
+///
+/// They are returned in arbitrary order
+pub fn roots<G: Graph>(
+    graph: &G,
+    revs: &HashSet<Revision>,
+) -> Result<Vec<Revision>, GraphError> {
+    let mut roots: Vec<Revision> = Vec::new();
+    for rev in revs {
+        if graph
+            .parents(*rev)?
+            .iter()
+            .filter(|p| **p != NULL_REVISION)
+            .all(|p| !revs.contains(p))
+        {
+            roots.push(*rev);
+        }
+    }
+    Ok(roots)
+}
+
+/// Compute the topological range between two collections of revisions
+///
+/// This is equivalent to the revset `<roots>::<heads>`.
+///
+/// Currently, the given `Graph` has to implement `Clone`, which means
+/// actually cloning just a reference-counted Python pointer if
+/// it's passed in through `rust-cpython`. This is due to the internal
+/// use of `AncestorsIterator`.
+///
+/// # Algorithmic details
+///
+/// This is a two-pass sweep inspired by what `reachableroots2` from
+/// `mercurial.cext.parsers` does to obtain the same results.
+///
+/// - First, we climb up the DAG from `heads` in topological order, keeping
+///   them in the `heads_ancestors` vector, and adding any element of
+///   `roots` we find among them to the resulting range.
+/// - Then, we iterate on that recorded vector so that a revision is always
+///   emitted after its parents and add all revisions whose parents are already
+///   in the range to the results.
+///
+/// # Performance notes
+///
+/// The main difference with the C implementation is that
+/// the latter uses a flat array with bit flags, instead of complex structures
+/// like `HashSet`, making it faster in most scenarios. In theory, it's
+/// possible that the present implementation could be more memory efficient
+/// for very large repositories with many branches.
+pub fn range(
+    graph: &(impl Graph + Clone),
+    roots: impl IntoIterator<Item = Revision>,
+    heads: impl IntoIterator<Item = Revision>,
+) -> Result<BTreeSet<Revision>, GraphError> {
+    let mut range = BTreeSet::new();
+    let roots: HashSet<Revision> = roots.into_iter().collect();
+    let min_root: Revision = match roots.iter().cloned().min() {
+        None => {
+            return Ok(range);
+        }
+        Some(r) => r,
+    };
+
+    // Internally, AncestorsIterator currently maintains a `HashSet`
+    // of all seen revisions, which is also what we record, albeit in an ordered
+    // way. There's room for improvement on this duplication.
+    let ait = AncestorsIterator::new(graph.clone(), heads, min_root, true)?;
+    let mut heads_ancestors: Vec<Revision> = Vec::new();
+    for revres in ait {
+        let rev = revres?;
+        if roots.contains(&rev) {
+            range.insert(rev);
+        }
+        heads_ancestors.push(rev);
+    }
+
+    for rev in heads_ancestors.into_iter().rev() {
+        for parent in graph.parents(rev)?.iter() {
+            if *parent != NULL_REVISION && range.contains(parent) {
+                range.insert(rev);
+            }
+        }
+    }
+    Ok(range)
+}
+
 #[cfg(test)]
 mod tests {
 
@@ -137,4 +224,53 @@
         Ok(())
     }
 
+    /// Apply `roots()` and sort the result for easier comparison
+    fn roots_sorted(
+        graph: &impl Graph,
+        revs: &[Revision],
+    ) -> Result<Vec<Revision>, GraphError> {
+        let mut as_vec = roots(graph, &revs.iter().cloned().collect())?;
+        as_vec.sort();
+        Ok(as_vec)
+    }
+
+    #[test]
+    fn test_roots() -> Result<(), GraphError> {
+        assert_eq!(roots_sorted(&SampleGraph, &[4, 5, 6])?, vec![4]);
+        assert_eq!(
+            roots_sorted(&SampleGraph, &[4, 1, 6, 12, 0])?,
+            vec![0, 4, 12]
+        );
+        assert_eq!(
+            roots_sorted(&SampleGraph, &[1, 2, 3, 4, 5, 6, 7, 8, 9])?,
+            vec![1, 8]
+        );
+        Ok(())
+    }
+
+    /// Apply `range()` and convert the result into a Vec for easier comparison
+    fn range_vec(
+        graph: impl Graph + Clone,
+        roots: &[Revision],
+        heads: &[Revision],
+    ) -> Result<Vec<Revision>, GraphError> {
+        range(&graph, roots.iter().cloned(), heads.iter().cloned())
+            .map(|bs| bs.into_iter().collect())
+    }
+
+    #[test]
+    fn test_range() -> Result<(), GraphError> {
+        assert_eq!(range_vec(SampleGraph, &[0], &[4])?, vec![0, 1, 2, 4]);
+        assert_eq!(range_vec(SampleGraph, &[0], &[8])?, vec![]);
+        assert_eq!(
+            range_vec(SampleGraph, &[5, 6], &[10, 11, 13])?,
+            vec![5, 10]
+        );
+        assert_eq!(
+            range_vec(SampleGraph, &[5, 6], &[10, 12])?,
+            vec![5, 6, 9, 10, 12]
+        );
+        Ok(())
+    }
+
 }
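
As a usage sketch (not part of this changeset): the new `roots` and `range` helpers correspond to the `roots(X)` and `<roots>::<heads>` revset operations. Assuming the `hg` crate layout used in this series (`dagops` and `testing` being public modules of hg-core), the following mirrors the assertions of `test_roots` and `test_range` above:

    use hg::dagops::{range, roots};
    use hg::testing::SampleGraph;
    use hg::{GraphError, Revision};
    use std::collections::HashSet;

    fn main() -> Result<(), GraphError> {
        // Equivalent to the revset `(5+6)::(10+12)` on the sample graph.
        let revset: Vec<Revision> =
            range(&SampleGraph, vec![5, 6], vec![10, 12])?
                .into_iter()
                .collect();
        assert_eq!(revset, vec![5, 6, 9, 10, 12]);

        // `roots` keeps the revisions that have no parent inside the given set.
        let revs: HashSet<Revision> = [4, 5, 6].iter().cloned().collect();
        let mut r = roots(&SampleGraph, &revs)?;
        r.sort();
        assert_eq!(r, vec![4]);
        Ok(())
    }
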
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/dirstate.rs	Mon May 20 11:40:47 2019 -0400
@@ -0,0 +1,409 @@
+// Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
+use std::collections::HashMap;
+use std::io::Cursor;
+use {DirstatePackError, DirstateParseError};
+
+#[derive(Debug, PartialEq, Copy, Clone)]
+pub struct DirstateParents<'a> {
+    pub p1: &'a [u8],
+    pub p2: &'a [u8],
+}
+/// The C implementation uses all signed types. This will be an issue
+/// either when 4GB+ source files are commonplace or in 2038, whichever
+/// comes first.
+#[derive(Debug, PartialEq)]
+pub struct DirstateEntry {
+    pub state: i8,
+    pub mode: i32,
+    pub mtime: i32,
+    pub size: i32,
+}
+pub type DirstateVec = Vec<(Vec<u8>, DirstateEntry)>;
+
+#[derive(Debug, PartialEq)]
+pub struct CopyVecEntry<'a> {
+    pub path: &'a [u8],
+    pub copy_path: &'a [u8],
+}
+pub type CopyVec<'a> = Vec<CopyVecEntry<'a>>;
+
+/// Parents are stored in the dirstate as byte hashes.
+const PARENT_SIZE: usize = 20;
+/// Dirstate entries have a static part of 8 + 32 + 32 + 32 + 32 bits.
+const MIN_ENTRY_SIZE: usize = 17;
+
+pub fn parse_dirstate(
+    contents: &[u8],
+) -> Result<(DirstateParents, DirstateVec, CopyVec), DirstateParseError> {
+    if contents.len() < PARENT_SIZE * 2 {
+        return Err(DirstateParseError::TooLittleData);
+    }
+
+    let mut dirstate_vec = vec![];
+    let mut copies = vec![];
+    let mut curr_pos = PARENT_SIZE * 2;
+    let parents = DirstateParents {
+        p1: &contents[..PARENT_SIZE],
+        p2: &contents[PARENT_SIZE..curr_pos],
+    };
+
+    while curr_pos < contents.len() {
+        if curr_pos + MIN_ENTRY_SIZE > contents.len() {
+            return Err(DirstateParseError::Overflow);
+        }
+        let entry_bytes = &contents[curr_pos..];
+
+        let mut cursor = Cursor::new(entry_bytes);
+        let state = cursor.read_i8()?;
+        let mode = cursor.read_i32::<BigEndian>()?;
+        let size = cursor.read_i32::<BigEndian>()?;
+        let mtime = cursor.read_i32::<BigEndian>()?;
+        let path_len = cursor.read_i32::<BigEndian>()? as usize;
+
+        if path_len > contents.len() - curr_pos {
+            return Err(DirstateParseError::Overflow);
+        }
+
+        // Slice instead of allocating the Vec that `read_exact` would need
+        let path = &entry_bytes[MIN_ENTRY_SIZE..MIN_ENTRY_SIZE + (path_len)];
+
+        let (path, copy) = match memchr::memchr(0, path) {
+            None => (path, None),
+            Some(i) => (&path[..i], Some(&path[(i + 1)..])),
+        };
+
+        if let Some(copy_path) = copy {
+            copies.push(CopyVecEntry { path, copy_path });
+        };
+        dirstate_vec.push((
+            path.to_owned(),
+            DirstateEntry {
+                state,
+                mode,
+                size,
+                mtime,
+            },
+        ));
+        curr_pos = curr_pos + MIN_ENTRY_SIZE + (path_len);
+    }
+
+    Ok((parents, dirstate_vec, copies))
+}
+
+pub fn pack_dirstate(
+    dirstate_vec: &DirstateVec,
+    copymap: &HashMap<Vec<u8>, Vec<u8>>,
+    parents: DirstateParents,
+    now: i32,
+) -> Result<(Vec<u8>, DirstateVec), DirstatePackError> {
+    if parents.p1.len() != PARENT_SIZE || parents.p2.len() != PARENT_SIZE {
+        return Err(DirstatePackError::CorruptedParent);
+    }
+
+    let expected_size: usize = dirstate_vec
+        .iter()
+        .map(|(ref filename, _)| {
+            let mut length = MIN_ENTRY_SIZE + filename.len();
+            if let Some(ref copy) = copymap.get(filename) {
+                length += copy.len() + 1;
+            }
+            length
+        })
+        .sum();
+    let expected_size = expected_size + PARENT_SIZE * 2;
+
+    let mut packed = Vec::with_capacity(expected_size);
+    let mut new_dirstate_vec = vec![];
+
+    packed.extend(parents.p1);
+    packed.extend(parents.p2);
+
+    for (ref filename, entry) in dirstate_vec {
+        let mut new_filename: Vec<u8> = filename.to_owned();
+        let mut new_mtime: i32 = entry.mtime;
+        if entry.state == 'n' as i8 && entry.mtime == now.into() {
+            // The file was last modified "simultaneously" with the current
+            // write to dirstate (i.e. within the same second for file-
+            // systems with a granularity of 1 sec). This commonly happens
+            // for at least a couple of files on 'update'.
+            // The user could change the file without changing its size
+            // within the same second. Invalidate the file's mtime in
+            // dirstate, forcing future 'status' calls to compare the
+            // contents of the file if the size is the same. This prevents
+            // mistakenly treating such files as clean.
+            new_mtime = -1;
+            new_dirstate_vec.push((
+                filename.to_owned(),
+                DirstateEntry {
+                    mtime: new_mtime,
+                    ..*entry
+                },
+            ));
+        }
+
+        if let Some(copy) = copymap.get(filename) {
+            new_filename.push('\0' as u8);
+            new_filename.extend(copy);
+        }
+
+        packed.write_i8(entry.state)?;
+        packed.write_i32::<BigEndian>(entry.mode)?;
+        packed.write_i32::<BigEndian>(entry.size)?;
+        packed.write_i32::<BigEndian>(new_mtime)?;
+        packed.write_i32::<BigEndian>(new_filename.len() as i32)?;
+        packed.extend(new_filename)
+    }
+
+    if packed.len() != expected_size {
+        return Err(DirstatePackError::BadSize(expected_size, packed.len()));
+    }
+
+    Ok((packed, new_dirstate_vec))
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_pack_dirstate_empty() {
+        let dirstate_vec: DirstateVec = vec![];
+        let copymap = HashMap::new();
+        let parents = DirstateParents {
+            p1: b"12345678910111213141",
+            p2: b"00000000000000000000",
+        };
+        let now: i32 = 15000000;
+        let expected =
+            (b"1234567891011121314100000000000000000000".to_vec(), vec![]);
+
+        assert_eq!(
+            expected,
+            pack_dirstate(&dirstate_vec, &copymap, parents, now).unwrap()
+        );
+    }
+    #[test]
+    fn test_pack_dirstate_one_entry() {
+        let dirstate_vec: DirstateVec = vec![(
+            vec!['f' as u8, '1' as u8],
+            DirstateEntry {
+                state: 'n' as i8,
+                mode: 0o644,
+                size: 0,
+                mtime: 791231220,
+            },
+        )];
+        let copymap = HashMap::new();
+        let parents = DirstateParents {
+            p1: b"12345678910111213141",
+            p2: b"00000000000000000000",
+        };
+        let now: i32 = 15000000;
+        let expected = (
+            [
+                49, 50, 51, 52, 53, 54, 55, 56, 57, 49, 48, 49, 49, 49, 50,
+                49, 51, 49, 52, 49, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48,
+                48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 110, 0, 0, 1, 164, 0,
+                0, 0, 0, 47, 41, 58, 244, 0, 0, 0, 2, 102, 49,
+            ]
+            .to_vec(),
+            vec![],
+        );
+
+        assert_eq!(
+            expected,
+            pack_dirstate(&dirstate_vec, &copymap, parents, now).unwrap()
+        );
+    }
+    #[test]
+    fn test_pack_dirstate_one_entry_with_copy() {
+        let dirstate_vec: DirstateVec = vec![(
+            b"f1".to_vec(),
+            DirstateEntry {
+                state: 'n' as i8,
+                mode: 0o644,
+                size: 0,
+                mtime: 791231220,
+            },
+        )];
+        let mut copymap = HashMap::new();
+        copymap.insert(b"f1".to_vec(), b"copyname".to_vec());
+        let parents = DirstateParents {
+            p1: b"12345678910111213141",
+            p2: b"00000000000000000000",
+        };
+        let now: i32 = 15000000;
+        let expected = (
+            [
+                49, 50, 51, 52, 53, 54, 55, 56, 57, 49, 48, 49, 49, 49, 50,
+                49, 51, 49, 52, 49, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48,
+                48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 110, 0, 0, 1, 164, 0,
+                0, 0, 0, 47, 41, 58, 244, 0, 0, 0, 11, 102, 49, 0, 99, 111,
+                112, 121, 110, 97, 109, 101,
+            ]
+            .to_vec(),
+            vec![],
+        );
+
+        assert_eq!(
+            expected,
+            pack_dirstate(&dirstate_vec, &copymap, parents, now).unwrap()
+        );
+    }
+
+    #[test]
+    fn test_parse_pack_one_entry_with_copy() {
+        let dirstate_vec: DirstateVec = vec![(
+            b"f1".to_vec(),
+            DirstateEntry {
+                state: 'n' as i8,
+                mode: 0o644,
+                size: 0,
+                mtime: 791231220,
+            },
+        )];
+        let mut copymap = HashMap::new();
+        copymap.insert(b"f1".to_vec(), b"copyname".to_vec());
+        let parents = DirstateParents {
+            p1: b"12345678910111213141",
+            p2: b"00000000000000000000",
+        };
+        let now: i32 = 15000000;
+        let result =
+            pack_dirstate(&dirstate_vec, &copymap, parents, now).unwrap();
+
+        assert_eq!(
+            (
+                parents,
+                dirstate_vec,
+                copymap
+                    .iter()
+                    .map(|(k, v)| CopyVecEntry {
+                        path: k.as_slice(),
+                        copy_path: v.as_slice()
+                    })
+                    .collect()
+            ),
+            parse_dirstate(result.0.as_slice()).unwrap()
+        )
+    }
+
+    #[test]
+    fn test_parse_pack_multiple_entries_with_copy() {
+        let dirstate_vec: DirstateVec = vec![
+            (
+                b"f1".to_vec(),
+                DirstateEntry {
+                    state: 'n' as i8,
+                    mode: 0o644,
+                    size: 0,
+                    mtime: 791231220,
+                },
+            ),
+            (
+                b"f2".to_vec(),
+                DirstateEntry {
+                    state: 'm' as i8,
+                    mode: 0o777,
+                    size: 1000,
+                    mtime: 791231220,
+                },
+            ),
+            (
+                b"f3".to_vec(),
+                DirstateEntry {
+                    state: 'r' as i8,
+                    mode: 0o644,
+                    size: 234553,
+                    mtime: 791231220,
+                },
+            ),
+            (
+                b"f4\xF6".to_vec(),
+                DirstateEntry {
+                    state: 'a' as i8,
+                    mode: 0o644,
+                    size: -1,
+                    mtime: -1,
+                },
+            ),
+        ];
+        let mut copymap = HashMap::new();
+        copymap.insert(b"f1".to_vec(), b"copyname".to_vec());
+        copymap.insert(b"f4\xF6".to_vec(), b"copyname2".to_vec());
+        let parents = DirstateParents {
+            p1: b"12345678910111213141",
+            p2: b"00000000000000000000",
+        };
+        let now: i32 = 15000000;
+        let result =
+            pack_dirstate(&dirstate_vec, &copymap, parents, now).unwrap();
+
+        assert_eq!(
+            (parents, dirstate_vec, copymap),
+            parse_dirstate(result.0.as_slice())
+                .and_then(|(p, dvec, cvec)| Ok((
+                    p,
+                    dvec,
+                    cvec.iter()
+                        .map(|entry| (
+                            entry.path.to_vec(),
+                            entry.copy_path.to_vec()
+                        ))
+                        .collect()
+                )))
+                .unwrap()
+        )
+    }
+
+    #[test]
+    /// https://www.mercurial-scm.org/repo/hg/rev/af3f26b6bba4
+    fn test_parse_pack_one_entry_with_copy_and_time_conflict() {
+        let dirstate_vec: DirstateVec = vec![(
+            b"f1".to_vec(),
+            DirstateEntry {
+                state: 'n' as i8,
+                mode: 0o644,
+                size: 0,
+                mtime: 15000000,
+            },
+        )];
+        let mut copymap = HashMap::new();
+        copymap.insert(b"f1".to_vec(), b"copyname".to_vec());
+        let parents = DirstateParents {
+            p1: b"12345678910111213141",
+            p2: b"00000000000000000000",
+        };
+        let now: i32 = 15000000;
+        let result =
+            pack_dirstate(&dirstate_vec, &copymap, parents, now).unwrap();
+
+        assert_eq!(
+            (
+                parents,
+                vec![(
+                    b"f1".to_vec(),
+                    DirstateEntry {
+                        state: 'n' as i8,
+                        mode: 0o644,
+                        size: 0,
+                        mtime: -1
+                    }
+                )],
+                copymap
+                    .iter()
+                    .map(|(k, v)| CopyVecEntry {
+                        path: k.as_slice(),
+                        copy_path: v.as_slice()
+                    })
+                    .collect()
+            ),
+            parse_dirstate(result.0.as_slice()).unwrap()
+        )
+    }
+}
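
As a usage sketch (not part of this changeset): the packed format produced and consumed above is two 20-byte parent hashes followed, for each entry, by a 17-byte header (state, then mode, size, mtime and path length as big-endian i32) and the path, with an optional `\0`-separated copy source taken from the copy map; normal-state entries whose mtime equals `now` are rewritten with mtime -1, as exercised by the time-conflict test. Assuming the re-exports added to hg-core's `lib.rs` in this series, a round trip looks like:

    use hg::{pack_dirstate, parse_dirstate, DirstateEntry, DirstateParents,
             DirstateVec};
    use std::collections::HashMap;

    fn main() {
        let dirstate_vec: DirstateVec = vec![(
            b"f1".to_vec(),
            DirstateEntry { state: b'n' as i8, mode: 0o644, size: 0, mtime: 791231220 },
        )];
        let mut copymap = HashMap::new();
        copymap.insert(b"f1".to_vec(), b"copyname".to_vec());
        let parents = DirstateParents {
            p1: b"12345678910111213141",
            p2: b"00000000000000000000",
        };

        // 40 bytes of parents + 17-byte entry header + b"f1\0copyname"
        let (packed, _updated) =
            pack_dirstate(&dirstate_vec, &copymap, parents, 15000000).unwrap();
        assert_eq!(packed.len(), 40 + 17 + b"f1\0copyname".len());

        // Parsing recovers the parents, the entries and the copy map.
        let (p, entries, copies) = parse_dirstate(&packed).unwrap();
        assert_eq!(p, parents);
        assert_eq!(entries, dirstate_vec);
        assert_eq!(copies[0].copy_path, &b"copyname"[..]);
    }
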
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/discovery.rs	Mon May 20 11:40:47 2019 -0400
@@ -0,0 +1,209 @@
+// discovery.rs
+//
+// Copyright 2019 Georges Racinet <georges.racinet@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+//! Discovery operations
+//!
+//! This is a Rust counterpart to the `partialdiscovery` class of
+//! `mercurial.setdiscovery`
+
+use super::{Graph, GraphError, Revision};
+use crate::ancestors::MissingAncestors;
+use crate::dagops;
+use std::collections::HashSet;
+
+pub struct PartialDiscovery<G: Graph + Clone> {
+    target_heads: Option<Vec<Revision>>,
+    graph: G, // plays the role of self._repo
+    common: MissingAncestors<G>,
+    undecided: Option<HashSet<Revision>>,
+    missing: HashSet<Revision>,
+}
+
+pub struct DiscoveryStats {
+    pub undecided: Option<usize>,
+}
+
+impl<G: Graph + Clone> PartialDiscovery<G> {
+    /// Create a PartialDiscovery object, with the intent
+    /// of comparing our `::<target_heads>` revset to the contents of another
+    /// repo.
+    ///
+    /// For now `target_heads` is passed as a vector, and will be used
+    /// at the first call to `ensure_undecided()`.
+    ///
+    /// If we want to make the signature more flexible,
+    /// we'll have to make it a type argument of `PartialDiscovery` or a trait
+    /// object, since we keep it around in the meantime.
+    pub fn new(graph: G, target_heads: Vec<Revision>) -> Self {
+        PartialDiscovery {
+            undecided: None,
+            target_heads: Some(target_heads),
+            graph: graph.clone(),
+            common: MissingAncestors::new(graph, vec![]),
+            missing: HashSet::new(),
+        }
+    }
+
+    /// Register revisions known as being common
+    pub fn add_common_revisions(
+        &mut self,
+        common: impl IntoIterator<Item = Revision>,
+    ) -> Result<(), GraphError> {
+        self.common.add_bases(common);
+        if let Some(ref mut undecided) = self.undecided {
+            self.common.remove_ancestors_from(undecided)?;
+        }
+        Ok(())
+    }
+
+    /// Register revisions known as being missing
+    pub fn add_missing_revisions(
+        &mut self,
+        missing: impl IntoIterator<Item = Revision>,
+    ) -> Result<(), GraphError> {
+        self.ensure_undecided()?;
+        let range = dagops::range(
+            &self.graph,
+            missing,
+            self.undecided.as_ref().unwrap().iter().cloned(),
+        )?;
+        let undecided_mut = self.undecided.as_mut().unwrap();
+        for missrev in range {
+            self.missing.insert(missrev);
+            undecided_mut.remove(&missrev);
+        }
+        Ok(())
+    }
+
+    /// Do we have any information about the peer?
+    pub fn has_info(&self) -> bool {
+        self.common.has_bases()
+    }
+
+    /// Have we acquired full knowledge of which of our revisions the peer has?
+    pub fn is_complete(&self) -> bool {
+        self.undecided.as_ref().map_or(false, |s| s.is_empty())
+    }
+
+    /// Return the heads of the currently known common set of revisions.
+    ///
+    /// If the discovery process is not complete (see `is_complete()`), the
+    /// caller must be aware that this is an intermediate state.
+    ///
+    /// On the other hand, if it is complete, then this is currently
+    /// the only way to retrieve the end results of the discovery process.
+    ///
+    /// We may introduce in the future an `into_common_heads` call that
+    /// would be more appropriate for normal Rust callers, dropping `self`
+    /// if it is complete.
+    pub fn common_heads(&self) -> Result<HashSet<Revision>, GraphError> {
+        self.common.bases_heads()
+    }
+
+    /// Force first computation of `self.undecided`
+    ///
+    /// After this, `self.undecided.as_ref()` and `.as_mut()` can be
+    /// unwrapped to get workable immutable or mutable references without
+    /// any panic.
+    ///
+    /// This is an imperative call instead of a lazy accessor, so that the
+    /// caller can easily keep the scope of the mutable borrow small,
+    /// compared to undecided(&'a mut self) -> &'a… which would keep the
+    /// borrow alive for as long as the resulting immutable one.
+    fn ensure_undecided(&mut self) -> Result<(), GraphError> {
+        if self.undecided.is_some() {
+            return Ok(());
+        }
+        let tgt = self.target_heads.take().unwrap();
+        self.undecided =
+            Some(self.common.missing_ancestors(tgt)?.into_iter().collect());
+        Ok(())
+    }
+
+    /// Provide statistics about the current state of the discovery process
+    pub fn stats(&self) -> DiscoveryStats {
+        DiscoveryStats {
+            undecided: self.undecided.as_ref().map(|s| s.len()),
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::testing::SampleGraph;
+
+    /// A PartialDiscovery as would be used when pushing all the heads
+    /// of `SampleGraph`
+    fn full_disco() -> PartialDiscovery<SampleGraph> {
+        PartialDiscovery::new(SampleGraph, vec![10, 11, 12, 13])
+    }
+
+    fn sorted_undecided(
+        disco: &PartialDiscovery<SampleGraph>,
+    ) -> Vec<Revision> {
+        let mut as_vec: Vec<Revision> =
+            disco.undecided.as_ref().unwrap().iter().cloned().collect();
+        as_vec.sort();
+        as_vec
+    }
+
+    fn sorted_missing(disco: &PartialDiscovery<SampleGraph>) -> Vec<Revision> {
+        let mut as_vec: Vec<Revision> =
+            disco.missing.iter().cloned().collect();
+        as_vec.sort();
+        as_vec
+    }
+
+    fn sorted_common_heads(
+        disco: &PartialDiscovery<SampleGraph>,
+    ) -> Result<Vec<Revision>, GraphError> {
+        let mut as_vec: Vec<Revision> =
+            disco.common_heads()?.iter().cloned().collect();
+        as_vec.sort();
+        Ok(as_vec)
+    }
+
+    #[test]
+    fn test_add_common_get_undecided() -> Result<(), GraphError> {
+        let mut disco = full_disco();
+        assert_eq!(disco.undecided, None);
+        assert!(!disco.has_info());
+        assert_eq!(disco.stats().undecided, None);
+
+        disco.add_common_revisions(vec![11, 12])?;
+        assert!(disco.has_info());
+        assert!(!disco.is_complete());
+        assert!(disco.missing.is_empty());
+
+        // add_common_revisions did not trigger a premature computation
+        // of `undecided`, let's check that and ask for them
+        assert_eq!(disco.undecided, None);
+        disco.ensure_undecided()?;
+        assert_eq!(sorted_undecided(&disco), vec![5, 8, 10, 13]);
+        assert_eq!(disco.stats().undecided, Some(4));
+        Ok(())
+    }
+
+    /// In this test, we pretend that our peer misses exactly (8+10)::
+    /// and that we're comparing our whole repo to it (as in a bare push)
+    #[test]
+    fn test_discovery() -> Result<(), GraphError> {
+        let mut disco = full_disco();
+        disco.add_common_revisions(vec![11, 12])?;
+        disco.add_missing_revisions(vec![8, 10])?;
+        assert_eq!(sorted_undecided(&disco), vec![5]);
+        assert_eq!(sorted_missing(&disco), vec![8, 10, 13]);
+        assert!(!disco.is_complete());
+
+        disco.add_common_revisions(vec![5])?;
+        assert_eq!(sorted_undecided(&disco), vec![]);
+        assert_eq!(sorted_missing(&disco), vec![8, 10, 13]);
+        assert!(disco.is_complete());
+        assert_eq!(sorted_common_heads(&disco)?, vec![5, 11, 12]);
+        Ok(())
+    }
+}
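
As a usage sketch (not part of this changeset): the intended calling pattern mirrors `mercurial.setdiscovery.partialdiscovery`: construct the object with the local heads, feed in the peer's answers as common or missing revisions, and read the common heads once `is_complete()` holds. On the `SampleGraph` used by the tests above (names as assumed in this series):

    use hg::discovery::PartialDiscovery;
    use hg::testing::SampleGraph;
    use hg::GraphError;

    fn main() -> Result<(), GraphError> {
        // Compare `::(10+11+12+13)` (all heads of the sample graph) to a peer.
        let mut disco = PartialDiscovery::new(SampleGraph, vec![10, 11, 12, 13]);

        // Peer answers: revisions it has are common, those it lacks are missing.
        disco.add_common_revisions(vec![11, 12])?;
        disco.add_missing_revisions(vec![8, 10])?;
        assert!(!disco.is_complete());

        disco.add_common_revisions(vec![5])?;
        assert!(disco.is_complete());

        let mut heads: Vec<_> = disco.common_heads()?.into_iter().collect();
        heads.sort();
        assert_eq!(heads, vec![5, 11, 12]);
        Ok(())
    }
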
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/filepatterns.rs	Mon May 20 11:40:47 2019 -0400
@@ -0,0 +1,345 @@
+use crate::{LineNumber, PatternError, PatternFileError};
+use regex::Regex;
+use std::collections::HashMap;
+use std::fs::File;
+use std::io::Read;
+use std::vec::Vec;
+
+lazy_static! {
+    static ref reescape: Vec<Vec<u8>> = {
+        let mut v: Vec<Vec<u8>> = (0..=255).map(|byte| vec![byte]).collect();
+        let to_escape = b"()[]{}?*+-|^$\\.&~# \t\n\r\x0b\x0c";
+        for byte in to_escape {
+            v[*byte as usize].insert(0, b'\\');
+        }
+        v
+    };
+}
+
+/// These are matched in order
+const GLOB_REPLACEMENTS: &[(&[u8], &[u8])] =
+    &[(b"*/", b"(?:.*/)?"), (b"*", b".*"), (b"", b"[^/]*")];
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum PatternSyntax {
+    Regexp,
+    /// Glob that matches at the front of the path
+    RootGlob,
+    /// Glob that matches at any suffix of the path (still anchored at slashes)
+    Glob,
+    Path,
+    RelPath,
+    RelGlob,
+    RelRegexp,
+    RootFiles,
+}
+
+/// Transforms a glob pattern into a regex
+fn glob_to_re(pat: &[u8]) -> Vec<u8> {
+    let mut input = pat;
+    let mut res: Vec<u8> = vec![];
+    let mut group_depth = 0;
+
+    while let Some((c, rest)) = input.split_first() {
+        input = rest;
+
+        match c {
+            b'*' => {
+                for (source, repl) in GLOB_REPLACEMENTS {
+                    if input.starts_with(source) {
+                        input = &input[source.len()..];
+                        res.extend(*repl);
+                        break;
+                    }
+                }
+            }
+            b'?' => res.extend(b"."),
+            b'[' => {
+                match input.iter().skip(1).position(|b| *b == b']') {
+                    None => res.extend(b"\\["),
+                    Some(end) => {
+                        // Account for the one we skipped
+                        let end = end + 1;
+
+                        res.extend(b"[");
+
+                        for (i, b) in input[..end].iter().enumerate() {
+                            if *b == b'!' && i == 0 {
+                                res.extend(b"^")
+                            } else if *b == b'^' && i == 0 {
+                                res.extend(b"\\^")
+                            } else if *b == b'\\' {
+                                res.extend(b"\\\\")
+                            } else {
+                                res.push(*b)
+                            }
+                        }
+                        res.extend(b"]");
+                        input = &input[end + 1..];
+                    }
+                }
+            }
+            b'{' => {
+                group_depth += 1;
+                res.extend(b"(?:")
+            }
+            b'}' if group_depth > 0 => {
+                group_depth -= 1;
+                res.extend(b")");
+            }
+            b',' if group_depth > 0 => res.extend(b"|"),
+            b'\\' => {
+                let c = {
+                    if let Some((c, rest)) = input.split_first() {
+                        input = rest;
+                        c
+                    } else {
+                        c
+                    }
+                };
+                res.extend(&reescape[*c as usize])
+            }
+            _ => res.extend(&reescape[*c as usize]),
+        }
+    }
+    res
+}
+
+fn escape_pattern(pattern: &[u8]) -> Vec<u8> {
+    pattern
+        .iter()
+        .flat_map(|c| reescape[*c as usize].clone())
+        .collect()
+}
+
+fn parse_pattern_syntax(kind: &[u8]) -> Result<PatternSyntax, PatternError> {
+    match kind {
+        b"re" => Ok(PatternSyntax::Regexp),
+        b"path" => Ok(PatternSyntax::Path),
+        b"relpath" => Ok(PatternSyntax::RelPath),
+        b"rootfilesin" => Ok(PatternSyntax::RootFiles),
+        b"relglob" => Ok(PatternSyntax::RelGlob),
+        b"relre" => Ok(PatternSyntax::RelRegexp),
+        b"glob" => Ok(PatternSyntax::Glob),
+        b"rootglob" => Ok(PatternSyntax::RootGlob),
+        _ => Err(PatternError::UnsupportedSyntax(
+            String::from_utf8_lossy(kind).to_string(),
+        )),
+    }
+}
+
+/// Builds the regex that corresponds to the given pattern.
+/// If within a `syntax: regexp` context, returns the pattern,
+/// otherwise, returns the corresponding regex.
+fn _build_single_regex(
+    syntax: PatternSyntax,
+    pattern: &[u8],
+    globsuffix: &[u8],
+) -> Vec<u8> {
+    if pattern.is_empty() {
+        return vec![];
+    }
+    match syntax {
+        PatternSyntax::Regexp => pattern.to_owned(),
+        PatternSyntax::RelRegexp => {
+            if pattern[0] == b'^' {
+                return pattern.to_owned();
+            }
+            let mut res = b".*".to_vec();
+            res.extend(pattern);
+            res
+        }
+        PatternSyntax::Path | PatternSyntax::RelPath => {
+            if pattern == b"." {
+                return vec![];
+            }
+            let mut pattern = escape_pattern(pattern);
+            pattern.extend(b"(?:/|$)");
+            pattern
+        }
+        PatternSyntax::RootFiles => {
+            let mut res = if pattern == b"." {
+                vec![]
+            } else {
+                // Pattern is a directory name.
+                let mut as_vec: Vec<u8> = escape_pattern(pattern);
+                as_vec.push(b'/');
+                as_vec
+            };
+
+            // Anything after the pattern must be a non-directory.
+            res.extend(b"[^/]+$");
+            res
+        }
+        PatternSyntax::Glob
+        | PatternSyntax::RelGlob
+        | PatternSyntax::RootGlob => {
+            let mut res: Vec<u8> = vec![];
+            if syntax == PatternSyntax::RelGlob {
+                res.extend(b"(?:|.*/)");
+            }
+
+            res.extend(glob_to_re(pattern));
+            res.extend(globsuffix.iter());
+            res
+        }
+    }
+}
+
+const GLOB_SPECIAL_CHARACTERS: [u8; 7] =
+    [b'*', b'?', b'[', b']', b'{', b'}', b'\\'];
+
+/// Wrapper function to `_build_single_regex` that short-circuits 'exact' globs
+/// that don't need to be transformed into a regex.
+pub fn build_single_regex(
+    kind: &str,
+    pat: &[u8],
+    globsuffix: &[u8],
+) -> Result<Vec<u8>, PatternError> {
+    let enum_kind = parse_pattern_syntax(kind.as_bytes())?;
+    if enum_kind == PatternSyntax::RootGlob
+        && pat.iter().all(|b| !GLOB_SPECIAL_CHARACTERS.contains(b))
+    {
+        Ok(pat.to_vec())
+    } else {
+        Ok(_build_single_regex(enum_kind, pat, globsuffix))
+    }
+}
+
+lazy_static! {
+    static ref SYNTAXES: HashMap<&'static str, &'static str> = {
+        let mut m = HashMap::new();
+
+        m.insert("re", "relre:");
+        m.insert("regexp", "relre:");
+        m.insert("glob", "relglob:");
+        m.insert("rootglob", "rootglob:");
+        m.insert("include", "include");
+        m.insert("subinclude", "subinclude");
+        m
+    };
+}
+
+pub type PatternTuple = (String, LineNumber, String);
+type WarningTuple = (String, String);
+
+pub fn parse_pattern_file_contents(
+    lines: &str,
+    file_path: &str,
+    warn: bool,
+) -> (Vec<PatternTuple>, Vec<WarningTuple>) {
+    let comment_regex = Regex::new(r"((?:^|[^\\])(?:\\\\)*)#.*").unwrap();
+    let mut inputs: Vec<PatternTuple> = vec![];
+    let mut warnings: Vec<WarningTuple> = vec![];
+
+    let mut current_syntax = "relre:";
+
+    let mut line = String::new();
+    for (line_number, line_str) in lines.split('\n').enumerate() {
+        let line_number = line_number + 1;
+        line.replace_range(.., line_str);
+
+        if line.contains('#') {
+            if let Some(cap) = comment_regex.captures(line.clone().as_ref()) {
+                line = line[..cap.get(1).unwrap().end()].to_string()
+            }
+            line = line.replace(r"\#", "#");
+        }
+
+        let mut line = line.trim_end();
+
+        if line.is_empty() {
+            continue;
+        }
+
+        if line.starts_with("syntax:") {
+            let syntax = line["syntax:".len()..].trim();
+
+            if let Some(rel_syntax) = SYNTAXES.get(syntax) {
+                current_syntax = rel_syntax;
+            } else if warn {
+                warnings.push((file_path.to_string(), syntax.to_string()));
+            }
+            continue;
+        }
+
+        let mut line_syntax: &str = &current_syntax;
+
+        for (s, rels) in SYNTAXES.iter() {
+            if line.starts_with(rels) {
+                line_syntax = rels;
+                line = &line[rels.len()..];
+                break;
+            } else if line.starts_with(&format!("{}:", s)) {
+                line_syntax = rels;
+                line = &line[s.len() + 1..];
+                break;
+            }
+        }
+
+        inputs.push((
+            format!("{}{}", line_syntax, line),
+            line_number,
+            line.to_string(),
+        ));
+    }
+    (inputs, warnings)
+}
+
+pub fn read_pattern_file(
+    file_path: String,
+    warn: bool,
+) -> Result<(Vec<PatternTuple>, Vec<WarningTuple>), PatternFileError> {
+    let mut f = File::open(&file_path)?;
+    let mut contents = String::new();
+
+    f.read_to_string(&mut contents)?;
+
+    Ok(parse_pattern_file_contents(&contents, &file_path, warn))
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn escape_pattern_test() {
+        let untouched = br#"!"%',/0123456789:;<=>@ABCDEFGHIJKLMNOPQRSTUVWXYZ_`abcdefghijklmnopqrstuvwxyz"#;
+        assert_eq!(escape_pattern(untouched), untouched.to_vec());
+        // All escape codes
+        assert_eq!(
+            escape_pattern(br#"()[]{}?*+-|^$\\.&~# \t\n\r\v\f"#),
+            br#"\(\)\[\]\{\}\?\*\+\-\|\^\$\\\\\.\&\~\#\ \\t\\n\\r\\v\\f"#
+                .to_vec()
+        );
+    }
+
+    #[test]
+    fn glob_test() {
+        assert_eq!(glob_to_re(br#"?"#), br#"."#);
+        assert_eq!(glob_to_re(br#"*"#), br#"[^/]*"#);
+        assert_eq!(glob_to_re(br#"**"#), br#".*"#);
+        assert_eq!(glob_to_re(br#"**/a"#), br#"(?:.*/)?a"#);
+        assert_eq!(glob_to_re(br#"a/**/b"#), br#"a/(?:.*/)?b"#);
+        assert_eq!(glob_to_re(br#"[a*?!^][^b][!c]"#), br#"[a*?!^][\^b][^c]"#);
+        assert_eq!(glob_to_re(br#"{a,b}"#), br#"(?:a|b)"#);
+        assert_eq!(glob_to_re(br#".\*\?"#), br#"\.\*\?"#);
+    }
+
+    #[test]
+    fn test_parse_pattern_file_contents() {
+        let lines = "syntax: glob\n*.elc";
+
+        assert_eq!(
+            vec![("relglob:*.elc".to_string(), 2, "*.elc".to_string())],
+            parse_pattern_file_contents(lines, "file_path", false).0,
+        );
+
+        let lines = "syntax: include\nsyntax: glob";
+
+        assert_eq!(
+            parse_pattern_file_contents(lines, "file_path", false).0,
+            vec![]
+        );
+    }
+}
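
As a usage sketch (not part of this changeset): `build_single_regex` turns one `(kind, pattern, glob suffix)` triple into regex bytes, with `glob_to_re` performing the glob-to-regex translation shown in `glob_test` above. This assumes the re-export added to hg-core's `lib.rs` in this series; the `(?:/|$)` suffix is only an illustrative value, not necessarily what the matcher passes:

    use hg::{build_single_regex, PatternError};

    fn main() -> Result<(), PatternError> {
        // A `relglob` pattern may match at any directory level, so the
        // translation prepends `(?:|.*/)` before the glob body.
        let regex = build_single_regex("relglob", b"*.elc", b"(?:/|$)")?;
        assert_eq!(regex, b"(?:|.*/)[^/]*\\.elc(?:/|$)".to_vec());
        Ok(())
    }
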
--- a/rust/hg-core/src/lib.rs	Mon May 20 10:08:28 2019 +0200
+++ b/rust/hg-core/src/lib.rs	Mon May 20 11:40:47 2019 -0400
@@ -2,10 +2,27 @@
 //
 // This software may be used and distributed according to the terms of the
 // GNU General Public License version 2 or any later version.
+extern crate byteorder;
+extern crate memchr;
+#[macro_use]
+extern crate lazy_static;
+extern crate regex;
+
 mod ancestors;
 pub mod dagops;
 pub use ancestors::{AncestorsIterator, LazyAncestors, MissingAncestors};
-pub mod testing;  // unconditionally built, for use from integration tests
+mod dirstate;
+pub mod discovery;
+pub mod testing; // unconditionally built, for use from integration tests
+pub use dirstate::{
+    pack_dirstate, parse_dirstate, CopyVec, CopyVecEntry, DirstateEntry,
+    DirstateParents, DirstateVec,
+};
+mod filepatterns;
+
+pub use filepatterns::{
+    build_single_regex, read_pattern_file, PatternSyntax, PatternTuple,
+};
 
 /// Mercurial revision numbers
 ///
@@ -13,7 +30,6 @@
 /// 4 bytes, and are liberally converted to ints, whence the i32
 pub type Revision = i32;
 
-
 /// Marker expressing the absence of a parent
 ///
 /// Independently of the actual representation, `NULL_REVISION` is guaranteed
@@ -34,8 +50,53 @@
     fn parents(&self, Revision) -> Result<[Revision; 2], GraphError>;
 }
 
+pub type LineNumber = usize;
+
 #[derive(Clone, Debug, PartialEq)]
 pub enum GraphError {
     ParentOutOfRange(Revision),
     WorkingDirectoryUnsupported,
 }
+
+#[derive(Clone, Debug, PartialEq)]
+pub enum DirstateParseError {
+    TooLittleData,
+    Overflow,
+    CorruptedEntry(String),
+}
+
+#[derive(Debug, PartialEq)]
+pub enum DirstatePackError {
+    CorruptedEntry(String),
+    CorruptedParent,
+    BadSize(usize, usize),
+}
+
+impl From<std::io::Error> for DirstatePackError {
+    fn from(e: std::io::Error) -> Self {
+        DirstatePackError::CorruptedEntry(e.to_string())
+    }
+}
+
+impl From<std::io::Error> for DirstateParseError {
+    fn from(e: std::io::Error) -> Self {
+        DirstateParseError::CorruptedEntry(e.to_string())
+    }
+}
+
+#[derive(Debug)]
+pub enum PatternError {
+    UnsupportedSyntax(String),
+}
+
+#[derive(Debug)]
+pub enum PatternFileError {
+    IO(std::io::Error),
+    Pattern(PatternError, LineNumber),
+}
+
+impl From<std::io::Error> for PatternFileError {
+    fn from(e: std::io::Error) -> Self {
+        PatternFileError::IO(e)
+    }
+}
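
As a usage sketch (not part of this changeset): the `From<std::io::Error>` conversions above are what let the dirstate and pattern-file code use `?` on raw I/O calls, so callers receive a domain error rather than a bare `io::Error`. For example, with the `read_pattern_file` re-export (names as in this series):

    use hg::{read_pattern_file, PatternFileError};

    // Any I/O failure inside `read_pattern_file` (e.g. a missing file)
    // surfaces as `PatternFileError::IO` thanks to the `From` impl above.
    fn count_patterns(path: &str) -> Result<usize, PatternFileError> {
        let (patterns, _warnings) = read_pattern_file(path.to_string(), false)?;
        Ok(patterns.len())
    }
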
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-cpython/src/dirstate.rs	Mon May 20 11:40:47 2019 -0400
@@ -0,0 +1,227 @@
+// dirstate.rs
+//
+// Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+//! Bindings for the `hg::dirstate` module provided by the
+//! `hg-core` package.
+//!
+//! From Python, this will be seen as `mercurial.rustext.dirstate`
+
+use cpython::{
+    exc, PyBytes, PyDict, PyErr, PyInt, PyModule, PyObject, PyResult,
+    PySequence, PyTuple, Python, ToPyObject,
+};
+use hg::{
+    pack_dirstate, parse_dirstate, CopyVecEntry, DirstateEntry,
+    DirstatePackError, DirstateParents, DirstateParseError, DirstateVec,
+};
+use std::collections::HashMap;
+use std::ffi::CStr;
+#[cfg(feature = "python27")]
+extern crate python27_sys as python_sys;
+#[cfg(feature = "python3")]
+extern crate python3_sys as python_sys;
+use self::python_sys::PyCapsule_Import;
+use libc::{c_char, c_int};
+use std::mem::transmute;
+
+/// C code uses a custom `dirstate_tuple` type and checks for this type in
+/// multiple places, raising a Python `Exception` if the check does not pass.
+/// Because this type differs only in name from the regular Python tuple, it
+/// would be a good idea in the near future to remove it entirely to allow
+/// for a pure Python tuple of the same effective structure to be used,
+/// rendering this type and the capsule below useless.
+type MakeDirstateTupleFn = extern "C" fn(
+    state: c_char,
+    mode: c_int,
+    size: c_int,
+    mtime: c_int,
+) -> PyObject;
+
+/// This is largely a copy/paste from cindex.rs, pending the merge of a
+/// `py_capsule_fn!` macro in the rust-cpython project:
+/// https://github.com/dgrunwald/rust-cpython/pull/169
+fn decapsule_make_dirstate_tuple(py: Python) -> PyResult<MakeDirstateTupleFn> {
+    unsafe {
+        let caps_name = CStr::from_bytes_with_nul_unchecked(
+            b"mercurial.cext.parsers.make_dirstate_tuple_CAPI\0",
+        );
+        let from_caps = PyCapsule_Import(caps_name.as_ptr(), 0);
+        if from_caps.is_null() {
+            return Err(PyErr::fetch(py));
+        }
+        Ok(transmute(from_caps))
+    }
+}
+
+fn parse_dirstate_wrapper(
+    py: Python,
+    dmap: PyDict,
+    copymap: PyDict,
+    st: PyBytes,
+) -> PyResult<PyTuple> {
+    match parse_dirstate(st.data(py)) {
+        Ok((parents, dirstate_vec, copies)) => {
+            for (filename, entry) in dirstate_vec {
+                dmap.set_item(
+                    py,
+                    PyBytes::new(py, &filename[..]),
+                    decapsule_make_dirstate_tuple(py)?(
+                        entry.state,
+                        entry.mode,
+                        entry.size,
+                        entry.mtime,
+                    ),
+                )?;
+            }
+            for CopyVecEntry { path, copy_path } in copies {
+                copymap.set_item(
+                    py,
+                    PyBytes::new(py, path),
+                    PyBytes::new(py, copy_path),
+                )?;
+            }
+            Ok((PyBytes::new(py, parents.p1), PyBytes::new(py, parents.p2))
+                .to_py_object(py))
+        }
+        Err(e) => Err(PyErr::new::<exc::ValueError, _>(
+            py,
+            match e {
+                DirstateParseError::TooLittleData => {
+                    "too little data for parents".to_string()
+                }
+                DirstateParseError::Overflow => {
+                    "overflow in dirstate".to_string()
+                }
+                DirstateParseError::CorruptedEntry(e) => e,
+            },
+        )),
+    }
+}
+
+fn pack_dirstate_wrapper(
+    py: Python,
+    dmap: PyDict,
+    copymap: PyDict,
+    pl: PyTuple,
+    now: PyInt,
+) -> PyResult<PyBytes> {
+    let p1 = pl.get_item(py, 0).extract::<PyBytes>(py)?;
+    let p1: &[u8] = p1.data(py);
+    let p2 = pl.get_item(py, 1).extract::<PyBytes>(py)?;
+    let p2: &[u8] = p2.data(py);
+
+    let dirstate_vec: Result<DirstateVec, PyErr> = dmap
+        .items(py)
+        .iter()
+        .map(|(filename, stats)| {
+            let stats = stats.extract::<PySequence>(py)?;
+            let state = stats.get_item(py, 0)?.extract::<PyBytes>(py)?;
+            let state = state.data(py)[0] as i8;
+            let mode = stats.get_item(py, 1)?.extract(py)?;
+            let size = stats.get_item(py, 2)?.extract(py)?;
+            let mtime = stats.get_item(py, 3)?.extract(py)?;
+            let filename = filename.extract::<PyBytes>(py)?;
+            let filename = filename.data(py);
+            Ok((
+                filename.to_owned(),
+                DirstateEntry {
+                    state,
+                    mode,
+                    size,
+                    mtime,
+                },
+            ))
+        })
+        .collect();
+
+    let copies: Result<HashMap<Vec<u8>, Vec<u8>>, PyErr> = copymap
+        .items(py)
+        .iter()
+        .map(|(key, value)| {
+            Ok((
+                key.extract::<PyBytes>(py)?.data(py).to_owned(),
+                value.extract::<PyBytes>(py)?.data(py).to_owned(),
+            ))
+        })
+        .collect();
+
+    match pack_dirstate(
+        &dirstate_vec?,
+        &copies?,
+        DirstateParents { p1, p2 },
+        now.value(py) as i32,
+    ) {
+        Ok((packed, new_dirstate_vec)) => {
+            for (
+                filename,
+                DirstateEntry {
+                    state,
+                    mode,
+                    size,
+                    mtime,
+                },
+            ) in new_dirstate_vec
+            {
+                dmap.set_item(
+                    py,
+                    PyBytes::new(py, &filename[..]),
+                    decapsule_make_dirstate_tuple(py)?(
+                        state, mode, size, mtime,
+                    ),
+                )?;
+            }
+            Ok(PyBytes::new(py, &packed))
+        }
+        Err(error) => Err(PyErr::new::<exc::ValueError, _>(
+            py,
+            match error {
+                DirstatePackError::CorruptedParent => {
+                    "expected a 20-byte hash".to_string()
+                }
+                DirstatePackError::CorruptedEntry(e) => e,
+                DirstatePackError::BadSize(expected, actual) => {
+                    format!("bad dirstate size: {} != {}", actual, expected)
+                }
+            },
+        )),
+    }
+}
+
+/// Create the module, with `__package__` given from parent
+pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
+    let dotted_name = &format!("{}.dirstate", package);
+    let m = PyModule::new(py, dotted_name)?;
+    m.add(py, "__package__", package)?;
+    m.add(py, "__doc__", "Dirstate - Rust implementation")?;
+    m.add(
+        py,
+        "parse_dirstate",
+        py_fn!(
+            py,
+            parse_dirstate_wrapper(dmap: PyDict, copymap: PyDict, st: PyBytes)
+        ),
+    )?;
+    m.add(
+        py,
+        "pack_dirstate",
+        py_fn!(
+            py,
+            pack_dirstate_wrapper(
+                dmap: PyDict,
+                copymap: PyDict,
+                pl: PyTuple,
+                now: PyInt
+            )
+        ),
+    )?;
+
+    let sys = PyModule::import(py, "sys")?;
+    let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
+    sys_modules.set_item(py, dotted_name, &m)?;
+
+    Ok(m)
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-cpython/src/discovery.rs	Mon May 20 11:40:47 2019 -0400
@@ -0,0 +1,127 @@
+// discovery.rs
+//
+// Copyright 2018 Georges Racinet <gracinet@anybox.fr>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+//! Bindings for the `hg::discovery` module provided by the
+//! `hg-core` crate. From Python, this will be seen as `rustext.discovery`
+//!
+//! # Classes visible from Python:
+//! - [`PartialDiscover`] is the Rust implementation of
+//!   `mercurial.setdiscovery.partialdiscovery`.
+
+use crate::conversion::{py_set, rev_pyiter_collect};
+use cindex::Index;
+use cpython::{
+    ObjectProtocol, PyDict, PyModule, PyObject, PyResult, Python,
+    PythonObject, ToPyObject,
+};
+use exceptions::GraphError;
+use hg::discovery::PartialDiscovery as CorePartialDiscovery;
+use hg::Revision;
+
+use std::cell::RefCell;
+
+py_class!(pub class PartialDiscovery |py| {
+    data inner: RefCell<Box<CorePartialDiscovery<Index>>>;
+
+    def __new__(
+        _cls,
+        index: PyObject,
+        targetheads: PyObject
+    ) -> PyResult<PartialDiscovery> {
+        Self::create_instance(
+            py,
+            RefCell::new(Box::new(CorePartialDiscovery::new(
+                Index::new(py, index)?,
+                rev_pyiter_collect(py, &targetheads)?,
+            )))
+        )
+    }
+
+    def addcommons(&self, commons: PyObject) -> PyResult<PyObject> {
+        let mut inner = self.inner(py).borrow_mut();
+        let commons_vec: Vec<Revision> = rev_pyiter_collect(py, &commons)?;
+        inner.add_common_revisions(commons_vec)
+            .map_err(|e| GraphError::pynew(py, e))?;
+        Ok(py.None())
+    }
+
+    def addmissings(&self, missings: PyObject) -> PyResult<PyObject> {
+        let mut inner = self.inner(py).borrow_mut();
+        let missings_vec: Vec<Revision> = rev_pyiter_collect(py, &missings)?;
+        inner.add_missing_revisions(missings_vec)
+            .map_err(|e| GraphError::pynew(py, e))?;
+        Ok(py.None())
+    }
+
+    def addinfo(&self, sample: PyObject) -> PyResult<PyObject> {
+        let mut missing: Vec<Revision> = Vec::new();
+        let mut common: Vec<Revision> = Vec::new();
+        for info in sample.iter(py)? { // info is a pair (Revision, bool)
+            let mut revknown = info?.iter(py)?;
+            let rev: Revision = revknown.next().unwrap()?.extract(py)?;
+            let known: bool = revknown.next().unwrap()?.extract(py)?;
+            if known {
+                common.push(rev);
+            } else {
+                missing.push(rev);
+            }
+        }
+        let mut inner = self.inner(py).borrow_mut();
+        inner.add_common_revisions(common)
+            .map_err(|e| GraphError::pynew(py, e))?;
+        inner.add_missing_revisions(missing)
+            .map_err(|e| GraphError::pynew(py, e))?;
+        Ok(py.None())
+    }
+
+    def hasinfo(&self) -> PyResult<bool> {
+        Ok(self.inner(py).borrow().has_info())
+    }
+
+    def iscomplete(&self) -> PyResult<bool> {
+        Ok(self.inner(py).borrow().is_complete())
+    }
+
+    def stats(&self) -> PyResult<PyDict> {
+        let stats = self.inner(py).borrow().stats();
+        let as_dict: PyDict = PyDict::new(py);
+        as_dict.set_item(py, "undecided",
+                         stats.undecided.map(
+                             |l| l.to_py_object(py).into_object())
+                             .unwrap_or_else(|| py.None()))?;
+        Ok(as_dict)
+    }
+
+    def commonheads(&self) -> PyResult<PyObject> {
+        py_set(
+            py,
+            &self.inner(py).borrow().common_heads()
+                .map_err(|e| GraphError::pynew(py, e))?
+        )
+    }
+});
+
+/// Create the module, with __package__ given from parent
+pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
+    let dotted_name = &format!("{}.discovery", package);
+    let m = PyModule::new(py, dotted_name)?;
+    m.add(py, "__package__", package)?;
+    m.add(
+        py,
+        "__doc__",
+        "Discovery of common node sets - Rust implementation",
+    )?;
+    m.add_class::<PartialDiscovery>(py)?;
+
+    let sys = PyModule::import(py, "sys")?;
+    let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
+    sys_modules.set_item(py, dotted_name, &m)?;
+    // Example C code (see pyexpat.c and import.c) will "give away the
+    // reference", but we won't because it will be consumed once the
+    // Rust PyObject is dropped.
+    Ok(m)
+}
--- a/rust/hg-cpython/src/exceptions.rs	Mon May 20 10:08:28 2019 +0200
+++ b/rust/hg-cpython/src/exceptions.rs	Mon May 20 11:40:47 2019 -0400
@@ -12,8 +12,8 @@
 //! existing Python exceptions if appropriate.
 //!
 //! [`GraphError`]: struct.GraphError.html
-use cpython::exc::ValueError;
-use cpython::{PyErr, Python};
+use cpython::exc::{ValueError, RuntimeError};
+use cpython::{PyErr, Python, exc};
 use hg;
 
 py_exception!(rustext, GraphError, ValueError);
@@ -28,9 +28,43 @@
                 match py
                     .import("mercurial.error")
                     .and_then(|m| m.get(py, "WdirUnsupported"))
-                {
-                    Err(e) => e,
-                    Ok(cls) => PyErr::from_instance(py, cls),
+                    {
+                        Err(e) => e,
+                        Ok(cls) => PyErr::from_instance(py, cls),
+                    }
+            }
+        }
+    }
+}
+
+py_exception!(rustext, PatternError, RuntimeError);
+py_exception!(rustext, PatternFileError, RuntimeError);
+
+impl PatternError {
+    pub fn pynew(py: Python, inner: hg::PatternError) -> PyErr {
+        match inner {
+            hg::PatternError::UnsupportedSyntax(m) => {
+                PatternError::new(py, ("PatternError", m))
+            }
+        }
+    }
+}
+
+impl PatternFileError {
+    pub fn pynew(py: Python, inner: hg::PatternFileError) -> PyErr {
+        match inner {
+            hg::PatternFileError::IO(e) => {
+                let value = (
+                    e.raw_os_error().unwrap_or(2),
+                    e.to_string()
+                );
+                PyErr::new::<exc::IOError, _>(py, value)
+            }
+            hg::PatternFileError::Pattern(e, l) => {
+                match e {
+                    hg::PatternError::UnsupportedSyntax(m) =>
+                        PatternFileError::new(py, ("PatternFileError", m, l))
                 }
             }
         }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-cpython/src/filepatterns.rs	Mon May 20 11:40:47 2019 -0400
@@ -0,0 +1,115 @@
+// filepatterns.rs
+//
+// Copyright 2019, Georges Racinet <gracinet@anybox.fr>,
+// Raphaël Gomès <rgomes@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+//! Bindings for the `hg::filepatterns` module provided by the
+//! `hg-core` crate. From Python, this will be seen as `rustext.filepatterns`
+//! and can be used as a replacement for the pure `filepatterns` Python module.
+//!
+use cpython::{
+    exc, PyDict, PyErr, PyModule, PyResult, PyString, PyTuple, Python,
+    ToPyObject,
+};
+use hg::{build_single_regex, read_pattern_file, PatternTuple};
+use exceptions::{
+    PatternError,
+    PatternFileError,
+};
+
+/// Rust does not allow a single function to have different return signatures.
+/// The hg-core function always returns the 3-tuple version; the (potential)
+/// conversion is handled at this level, since it is unlikely to have any
+/// measurable impact on performance.
+///
+/// The Python implementation passes a function reference for `warn` instead
+/// of a boolean that is used to emit warnings while parsing. The Rust
+/// implementation chooses to accumulate the warnings and propagate them to
+/// Python upon completion. See the `readpatternfile` function in `match.py`
+/// for more details.
+fn read_pattern_file_wrapper(
+    py: Python,
+    file_path: String,
+    warn: bool,
+    source_info: bool,
+) -> PyResult<PyTuple> {
+    match read_pattern_file(file_path, warn) {
+        Ok((patterns, warnings)) => {
+            if source_info {
+                return Ok((patterns, warnings).to_py_object(py));
+            }
+            let itemgetter = |x: &PatternTuple| x.0.to_py_object(py);
+            let results: Vec<PyString> =
+                patterns.iter().map(itemgetter).collect();
+            Ok((results, warnings).to_py_object(py))
+        }
+        Err(e) => Err(PatternFileError::pynew(py, e)),
+    }
+}
+
+fn build_single_regex_wrapper(
+    py: Python,
+    kind: String,
+    pat: String,
+    globsuffix: String,
+) -> PyResult<PyString> {
+    match build_single_regex(
+        kind.as_ref(),
+        pat.as_bytes(),
+        globsuffix.as_bytes(),
+    ) {
+        Ok(regex) => match String::from_utf8(regex) {
+            Ok(regex) => Ok(regex.to_py_object(py)),
+            Err(e) => Err(PyErr::new::<exc::UnicodeDecodeError, _>(
+                py,
+                e.to_string(),
+            )),
+        },
+        Err(e) => Err(PatternError::pynew(py, e)),
+    }
+}
+
+pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
+    let dotted_name = &format!("{}.filepatterns", package);
+    let m = PyModule::new(py, dotted_name)?;
+
+    m.add(py, "__package__", package)?;
+    m.add(
+        py,
+        "__doc__",
+        "Patterns files parsing - Rust implementation",
+    )?;
+    m.add(
+        py,
+        "build_single_regex",
+        py_fn!(
+            py,
+            build_single_regex_wrapper(
+                kind: String,
+                pat: String,
+                globsuffix: String
+            )
+        ),
+    )?;
+    m.add(
+        py,
+        "read_pattern_file",
+        py_fn!(
+            py,
+            read_pattern_file_wrapper(
+                file_path: String,
+                warn: bool,
+                source_info: bool
+            )
+        ),
+    )?;
+
+    let sys = PyModule::import(py, "sys")?;
+    let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
+    sys_modules.set_item(py, dotted_name, &m)?;
+
+    Ok(m)
+}
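
The bindings defined above become callable from Python once the rustext
extension is built. The following is an illustrative sketch only, not part of
this changeset: the function names and argument order follow the py_fn!
registrations in init_module above, while the '.hgignore' path, the 'glob'
kind and the glob-suffix value are hypothetical placeholders.

    from mercurial import rustext
    rustext.__name__  # force the actual import under demandimport

    # build_single_regex(kind, pat, globsuffix) returns a regex string
    regex = rustext.filepatterns.build_single_regex('glob', '*.py', '(?:/|$)')

    # read_pattern_file(file_path, warn, source_info) returns a
    # (patterns, warnings) pair; with source_info=False only the bare
    # pattern strings are returned
    patterns, warnings = rustext.filepatterns.read_pattern_file(
        '.hgignore', True, False)
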
--- a/rust/hg-cpython/src/lib.rs	Mon May 20 10:08:28 2019 +0200
+++ b/rust/hg-cpython/src/lib.rs	Mon May 20 11:40:47 2019 -0400
@@ -23,12 +23,16 @@
 extern crate cpython;
 extern crate hg;
 extern crate libc;
+extern crate python27_sys;
 
 pub mod ancestors;
 mod cindex;
 mod conversion;
 pub mod dagops;
+pub mod discovery;
 pub mod exceptions;
+pub mod dirstate;
+pub mod filepatterns;
 
 py_module_initializer!(rustext, initrustext, PyInit_rustext, |py, m| {
     m.add(
@@ -40,6 +44,11 @@
     let dotted_name: String = m.get(py, "__name__")?.extract(py)?;
     m.add(py, "ancestor", ancestors::init_module(py, &dotted_name)?)?;
     m.add(py, "dagop", dagops::init_module(py, &dotted_name)?)?;
+    m.add(py, "discovery", discovery::init_module(py, &dotted_name)?)?;
+    m.add(py, "dirstate", dirstate::init_module(py, &dotted_name)?)?;
+    m.add(py, "filepatterns", filepatterns::init_module(py, &dotted_name)?)?;
     m.add(py, "GraphError", py.get_type::<exceptions::GraphError>())?;
+    m.add(py, "PatternFileError", py.get_type::<exceptions::PatternFileError>())?;
+    m.add(py, "PatternError", py.get_type::<exceptions::PatternError>())?;
     Ok(())
 });
--- a/tests/common-pattern.py	Mon May 20 10:08:28 2019 +0200
+++ b/tests/common-pattern.py	Mon May 20 11:40:47 2019 -0400
@@ -115,6 +115,11 @@
 # Various platform error strings, keyed on a common replacement string
 _errors = {
     br'$ENOENT$': (
+        # IOError in Python does not have the same error message
+        # as in Rust, and automatic conversion is not possible
+        # because of module member privacy.
+        br'No such file or directory \(os error 2\)',
+
         # strerror()
         br'No such file or directory',
 
--- a/tests/fakedirstatewritetime.py	Mon May 20 10:08:28 2019 +0200
+++ b/tests/fakedirstatewritetime.py	Mon May 20 11:40:47 2019 -0400
@@ -16,6 +16,12 @@
 )
 from mercurial.utils import dateutil
 
+try:
+    from mercurial import rustext
+    rustext.__name__  # force actual import (see hgdemandimport)
+except ImportError:
+    rustext = None
+
 configtable = {}
 configitem = registrar.configitem(configtable)
 
@@ -51,16 +57,22 @@
     # 'fakenow' value and 'touch -t YYYYmmddHHMM' argument easy
     fakenow = dateutil.parsedate(fakenow, [b'%Y%m%d%H%M'])[0]
 
-    orig_pack_dirstate = parsers.pack_dirstate
+    if rustext is not None:
+        orig_module = rustext.dirstate
+        orig_pack_dirstate = rustext.dirstate.pack_dirstate
+    else:
+        orig_module = parsers
+        orig_pack_dirstate = parsers.pack_dirstate
+
     orig_dirstate_getfsnow = dirstate._getfsnow
     wrapper = lambda *args: pack_dirstate(fakenow, orig_pack_dirstate, *args)
 
-    parsers.pack_dirstate = wrapper
+    orig_module.pack_dirstate = wrapper
     dirstate._getfsnow = lambda *args: fakenow
     try:
         return func()
     finally:
-        parsers.pack_dirstate = orig_pack_dirstate
+        orig_module.pack_dirstate = orig_pack_dirstate
         dirstate._getfsnow = orig_dirstate_getfsnow
 
 def _poststatusfixup(orig, workingctx, status, fixup):
@@ -74,5 +86,5 @@
 def extsetup(ui):
     extensions.wrapfunction(context.workingctx, '_poststatusfixup',
                             _poststatusfixup)
-    extensions.wrapfunction(context.committablectx, 'markcommitted',
+    extensions.wrapfunction(context.workingctx, 'markcommitted',
                             markcommitted)
--- a/tests/phabricator/phabsend-create-alpha.json	Mon May 20 10:08:28 2019 +0200
+++ b/tests/phabricator/phabsend-create-alpha.json	Mon May 20 11:40:47 2019 -0400
@@ -1,617 +1,617 @@
 {
+    "version": 1,
     "interactions": [
         {
             "request": {
-                "method": "POST",
-                "body": "constraints%5Bcallsigns%5D%5B0%5D=HG&api.token=cli-hahayouwish",
-                "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "93"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "79"
                     ]
-                }
+                },
+                "body": "api.token=cli-hahayouwish&constraints%5Bcallsigns%5D%5B0%5D=HG",
+                "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:23 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:00 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "549"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2Fpywot5xerq4gs2tjxw3gnadzdg6vomqmfcnwqddp; expires=Fri, 01-Mar-2024 00:12:23 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":{\"data\":[{\"id\":10,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":\"Mercurial\",\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"spacePHID\":null,\"dateCreated\":1507817156,\"dateModified\":1529613276,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3&api.token=cli-hahayouwish&diff=diff+--git+a%2Falpha+b%2Falpha%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Falpha%0A%40%40+-0%2C0+%2B1%2C1+%40%40%0A%2Balpha%0A",
-                "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "235"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "235"
                     ]
-                }
+                },
+                "body": "api.token=cli-hahayouwish&diff=diff+--git+a%2Falpha+b%2Falpha%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Falpha%0A%40%40+-0%2C0+%2B1%2C1+%40%40%0A%2Balpha%0A&repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3",
+                "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":{\"id\":14303,\"phid\":\"PHID-DIFF-allzuauvigfjpv4z6dpi\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/14303\\/\"},\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:24 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:01 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "172"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2F2n2dlkkwzljrpzfghpdsflbt4ftnrwcc446dzcy5; expires=Fri, 01-Mar-2024 00:12:24 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":{\"id\":1899,\"phid\":\"PHID-DIFF-gpg57jico75ouhl2bux2\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/1899\\/\"},\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "diff_id=14303&data=%7B%22user%22%3A+%22test%22%2C+%22parent%22%3A+%220000000000000000000000000000000000000000%22%2C+%22node%22%3A+%22d386117f30e6b1282897bdbde75ac21e095163d4%22%2C+%22date%22%3A+%220+0%22%7D&api.token=cli-hahayouwish&name=hg%3Ameta",
-                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "296"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "264"
                     ]
-                }
+                },
+                "body": "diff_id=1899&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%22d386117f30e6b1282897bdbde75ac21e095163d4%22%2C+%22parent%22%3A+%220000000000000000000000000000000000000000%22%2C+%22user%22%3A+%22test%22%7D&name=hg%3Ameta&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:25 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:02 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "51"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2F5mq3t25wu5igv7oufpwcoy32fveozo7wn5wni3gw; expires=Fri, 01-Mar-2024 00:12:25 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "diff_id=14303&data=%7B%22d386117f30e6b1282897bdbde75ac21e095163d4%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&api.token=cli-hahayouwish&name=local%3Acommits",
-                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "257"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "227"
                     ]
-                }
+                },
+                "body": "diff_id=1899&data=%7B%22d386117f30e6b1282897bdbde75ac21e095163d4%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%22d386117f30e6b1282897bdbde75ac21e095163d4%22%2C+%22parents%22%3A+%5B%220000000000000000000000000000000000000000%22%5D%2C+%22time%22%3A+0%7D%7D&name=local%3Acommits&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:25 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:02 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "51"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2F5nja6g4cnpt63ctjjwykxyceyb7kokfptrzbejoc; expires=Fri, 01-Mar-2024 00:12:25 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "api.token=cli-hahayouwish&corpus=create+alpha+for+phabricator+test+%E2%82%AC",
-                "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "93"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "93"
                     ]
-                }
+                },
+                "body": "api.token=cli-hahayouwish&corpus=create+alpha+for+phabricator+test+%E2%82%AC",
+                "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create alpha for phabricator test \\u20ac\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"}},\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:26 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:03 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "298"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2Fkrxawhyvcd4jhv77inuwdmzcci4f7kql6c7l3smz; expires=Fri, 01-Mar-2024 00:12:26 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create alpha for phabricator test \\u20ac\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"create alpha for phabricator test \\u20ac\"}]},\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-allzuauvigfjpv4z6dpi&transactions%5B1%5D%5Btype%5D=title&transactions%5B1%5D%5Bvalue%5D=create+alpha+for+phabricator+test+%E2%82%AC&api.token=cli-hahayouwish",
-                "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "252"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "252"
                     ]
-                }
+                },
+                "body": "api.token=cli-hahayouwish&transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-gpg57jico75ouhl2bux2&transactions%5B1%5D%5Btype%5D=title&transactions%5B1%5D%5Bvalue%5D=create+alpha+for+phabricator+test+%E2%82%AC",
+                "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":{\"object\":{\"id\":6054,\"phid\":\"PHID-DREV-6pczsbtdpqjc2nskmxwy\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-efgl4j4fesixjog\"},{\"phid\":\"PHID-XACT-DREV-xj7ksjeyfadwf5m\"},{\"phid\":\"PHID-XACT-DREV-gecx5zw42kkuffc\"},{\"phid\":\"PHID-XACT-DREV-asda7zcwgzdadoi\"},{\"phid\":\"PHID-XACT-DREV-ku26t33y6iiugjw\"}]},\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:27 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:04 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "294"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2Fjwgcqb5hvbltjq4jqbpauz7rmmhpuh2rb7phsdmf; expires=Fri, 01-Mar-2024 00:12:27 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":{\"object\":{\"id\":1190,\"phid\":\"PHID-DREV-kikesmfxhzpfaxbzgj3l\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-od4nnssrqj57m6x\"},{\"phid\":\"PHID-XACT-DREV-2prb5lagzng6uqt\"},{\"phid\":\"PHID-XACT-DREV-qu7o6fgwssovbwb\"},{\"phid\":\"PHID-XACT-DREV-uynfy6n3u6new5f\"}]},\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "api.token=cli-hahayouwish&ids%5B0%5D=6054",
-                "uri": "https://phab.mercurial-scm.org//api/differential.query",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "58"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "58"
                     ]
-                }
+                },
+                "body": "ids%5B0%5D=1190&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.query",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":[{\"id\":\"6054\",\"phid\":\"PHID-DREV-6pczsbtdpqjc2nskmxwy\",\"title\":\"create alpha for phabricator test \\u20ac\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D6054\",\"dateCreated\":\"1551571947\",\"dateModified\":\"1551571947\",\"authorPHID\":\"PHID-USER-5iy6mkoveguhm2zthvww\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":[],\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"1\",\"activeDiffPHID\":\"PHID-DIFF-allzuauvigfjpv4z6dpi\",\"diffs\":[\"14303\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:28 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:05 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "778"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2F3lgkbbyaa646ng5klghjyehsbjxtaqblipnvocuz; expires=Fri, 01-Mar-2024 00:12:28 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":[{\"id\":\"1190\",\"phid\":\"PHID-DREV-kikesmfxhzpfaxbzgj3l\",\"title\":\"create alpha for phabricator test \\u20ac\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D1190\",\"dateCreated\":\"1557063064\",\"dateModified\":\"1557063064\",\"authorPHID\":\"PHID-USER-qmzis76vb2yh3ogldu6r\",\"status\":\"0\",\"statusName\":\"Draft\",\"properties\":{\"draft.broadcast\":false,\"lines.added\":1,\"lines.removed\":0},\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"1\",\"activeDiffPHID\":\"PHID-DIFF-gpg57jico75ouhl2bux2\",\"diffs\":[\"1899\"],\"commits\":[],\"reviewers\":[],\"ccs\":[],\"hashes\":[],\"auxiliary\":{\"bugzilla.bug-id\":null,\"phabricator:projects\":[\"PHID-PROJ-f2a3wl5wxtqdtfgdjqzk\"],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "diff_id=14303&data=%7B%22user%22%3A+%22test%22%2C+%22parent%22%3A+%220000000000000000000000000000000000000000%22%2C+%22node%22%3A+%22cb03845d6dd98c72bec766c7ed08c693cc49817a%22%2C+%22date%22%3A+%220+0%22%7D&api.token=cli-hahayouwish&name=hg%3Ameta",
-                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "296"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "264"
                     ]
-                }
+                },
+                "body": "diff_id=1899&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%22a86ed7d85e866f01161e9f55cee5d116272f508f%22%2C+%22parent%22%3A+%220000000000000000000000000000000000000000%22%2C+%22user%22%3A+%22test%22%7D&name=hg%3Ameta&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:28 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:06 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "51"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2Fwjxvlsjqmqwvcljfv6oe2sbometi3gebps6vzrlw; expires=Fri, 01-Mar-2024 00:12:28 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "diff_id=14303&data=%7B%22cb03845d6dd98c72bec766c7ed08c693cc49817a%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&api.token=cli-hahayouwish&name=local%3Acommits",
-                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "257"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "227"
                     ]
-                }
+                },
+                "body": "diff_id=1899&data=%7B%22a86ed7d85e866f01161e9f55cee5d116272f508f%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%22a86ed7d85e866f01161e9f55cee5d116272f508f%22%2C+%22parents%22%3A+%5B%220000000000000000000000000000000000000000%22%5D%2C+%22time%22%3A+0%7D%7D&name=local%3Acommits&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:29 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:06 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "51"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2Foeyncgzaanzmnhgfc7ecvmu5pq7qju7ewq6tvgrp; expires=Fri, 01-Mar-2024 00:12:29 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
                 }
             }
         }
-    ],
-    "version": 1
+    ]
 }
--- a/tests/phabricator/phabsend-create-public.json	Mon May 20 10:08:28 2019 +0200
+++ b/tests/phabricator/phabsend-create-public.json	Mon May 20 11:40:47 2019 -0400
@@ -1,957 +1,957 @@
 {
+    "version": 1,
     "interactions": [
         {
-            "response": {
+            "request": {
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "93"
+                    ],
                     "content-type": [
-                        "application/json"
-                    ], 
-                    "date": [
-                        "Thu, 10 Jan 2019 04:08:24 GMT"
-                    ], 
-                    "x-content-type-options": [
-                        "nosniff"
-                    ], 
-                    "cache-control": [
-                        "no-store"
-                    ], 
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ], 
-                    "x-xss-protection": [
-                        "1; mode=block"
-                    ], 
-                    "x-frame-options": [
-                        "Deny"
-                    ], 
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ], 
-                    "set-cookie": [
-                        "phsid=A%2F5faozuxaekgxbyfcc43jvrcmbr5fscbki46mvcvl; expires=Tue, 09-Jan-2024 04:08:24 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ], 
-                    "transfer-encoding": [
-                        "chunked"
-                    ], 
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
-                    ]
-                }, 
-                "status": {
-                    "message": "OK", 
-                    "code": 200
-                }, 
-                "body": {
-                    "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
-                }
-            }, 
-            "request": {
-                "method": "POST", 
-                "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search", 
-                "headers": {
-                    "content-length": [
-                        "79"
-                    ], 
+                        "application/x-www-form-urlencoded"
+                    ],
                     "accept": [
                         "application/mercurial-0.1"
-                    ], 
-                    "content-type": [
-                        "application/x-www-form-urlencoded"
-                    ], 
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
-                    ], 
+                    ],
                     "host": [
                         "phab.mercurial-scm.org"
                     ]
-                }, 
-                "body": "constraints%5Bcallsigns%5D%5B0%5D=HG&api.token=cli-hahayouwish"
-            }
-        }, 
-        {
+                },
+                "body": "api.token=cli-hahayouwish&constraints%5Bcallsigns%5D%5B0%5D=HG",
+                "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+                "method": "POST"
+            },
             "response": {
+                "status": {
+                    "code": 200,
+                    "message": "OK"
+                },
                 "headers": {
+                    "x-xss-protection": [
+                        "1; mode=block"
+                    ],
+                    "cache-control": [
+                        "no-store"
+                    ],
                     "content-type": [
                         "application/json"
-                    ], 
+                    ],
                     "date": [
-                        "Thu, 10 Jan 2019 04:08:25 GMT"
-                    ], 
+                        "Sun, 05 May 2019 13:31:20 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "549"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
-                    ], 
-                    "cache-control": [
-                        "no-store"
-                    ], 
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ], 
-                    "x-xss-protection": [
-                        "1; mode=block"
-                    ], 
-                    "x-frame-options": [
-                        "Deny"
-                    ], 
+                    ],
                     "expires": [
                         "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ], 
-                    "set-cookie": [
-                        "phsid=A%2Fkb72422mbpyuyoultl4hkizat6qscjgrl5hi6k2n; expires=Tue, 09-Jan-2024 04:08:25 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ], 
-                    "transfer-encoding": [
-                        "chunked"
-                    ], 
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
                     ]
-                }, 
-                "status": {
-                    "message": "OK", 
-                    "code": 200
-                }, 
+                },
                 "body": {
-                    "string": "{\"result\":{\"id\":13121,\"phid\":\"PHID-DIFF-xrku5f3mlveqr3hhj6a7\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/13121\\/\"},\"error_code\":null,\"error_info\":null}"
+                    "string": "{\"result\":{\"data\":[{\"id\":10,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":\"Mercurial\",\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"spacePHID\":null,\"dateCreated\":1507817156,\"dateModified\":1529613276,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
                 }
-            }, 
+            }
+        },
+        {
             "request": {
-                "method": "POST", 
-                "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff", 
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
                     "content-length": [
                         "220"
-                    ], 
+                    ],
+                    "content-type": [
+                        "application/x-www-form-urlencoded"
+                    ],
                     "accept": [
                         "application/mercurial-0.1"
-                    ], 
-                    "content-type": [
-                        "application/x-www-form-urlencoded"
-                    ], 
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
-                    ], 
+                    ],
                     "host": [
                         "phab.mercurial-scm.org"
                     ]
-                }, 
-                "body": "repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3&diff=diff+--git+a%2Fbeta+b%2Fbeta%0A---+a%2Fbeta%0A%2B%2B%2B+b%2Fbeta%0A%40%40+-1%2C1+%2B1%2C1+%40%40%0A-beta%0A%2Bpublic+change%0A&api.token=cli-hahayouwish"
-            }
-        }, 
-        {
+                },
+                "body": "api.token=cli-hahayouwish&diff=diff+--git+a%2Fbeta+b%2Fbeta%0A---+a%2Fbeta%0A%2B%2B%2B+b%2Fbeta%0A%40%40+-1%2C1+%2B1%2C1+%40%40%0A-beta%0A%2Bpublic+change%0A&repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3",
+                "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
+                "method": "POST"
+            },
             "response": {
+                "status": {
+                    "code": 200,
+                    "message": "OK"
+                },
                 "headers": {
+                    "x-xss-protection": [
+                        "1; mode=block"
+                    ],
+                    "cache-control": [
+                        "no-store"
+                    ],
                     "content-type": [
                         "application/json"
-                    ], 
+                    ],
                     "date": [
-                        "Thu, 10 Jan 2019 04:08:25 GMT"
-                    ], 
+                        "Sun, 05 May 2019 13:31:21 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "172"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
-                    ], 
-                    "cache-control": [
-                        "no-store"
-                    ], 
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ], 
-                    "x-xss-protection": [
-                        "1; mode=block"
-                    ], 
-                    "x-frame-options": [
-                        "Deny"
-                    ], 
+                    ],
                     "expires": [
                         "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ], 
-                    "set-cookie": [
-                        "phsid=A%2Fpyr677mjsjvlsn3wwzl2iignpppablawwz7dn5ap; expires=Tue, 09-Jan-2024 04:08:25 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ], 
-                    "transfer-encoding": [
-                        "chunked"
-                    ], 
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
                     ]
-                }, 
-                "status": {
-                    "message": "OK", 
-                    "code": 200
-                }, 
+                },
                 "body": {
-                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+                    "string": "{\"result\":{\"id\":1902,\"phid\":\"PHID-DIFF-uuzq4s7s72y4ts7ijduc\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/1902\\/\"},\"error_code\":null,\"error_info\":null}"
                 }
-            }, 
+            }
+        },
+        {
             "request": {
-                "method": "POST", 
-                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", 
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
                     "content-length": [
-                        "264"
-                    ], 
+                        "296"
+                    ],
+                    "content-type": [
+                        "application/x-www-form-urlencoded"
+                    ],
                     "accept": [
                         "application/mercurial-0.1"
-                    ], 
-                    "content-type": [
-                        "application/x-www-form-urlencoded"
-                    ], 
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
-                    ], 
+                    ],
                     "host": [
                         "phab.mercurial-scm.org"
                     ]
-                }, 
-                "body": "name=hg%3Ameta&api.token=cli-hahayouwish&data=%7B%22date%22%3A+%220+0%22%2C+%22user%22%3A+%22test%22%2C+%22node%22%3A+%22540a21d3fbeb7c56cafe726bba6cd9fdcc94f29c%22%2C+%22parent%22%3A+%22c2b605ada280b38c38031b5d31622869c72b0d8d%22%7D&diff_id=13121"
-            }
-        }, 
-        {
+                },
+                "body": "diff_id=1902&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%2224ffd6bca53a1e05369ed5b8834587c2b2b364da%22%2C+%22parent%22%3A+%222837deb84f4ab1315c1197b8aef10c620465e352%22%2C+%22user%22%3A+%22test%22%7D&name=hg%3Ameta&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+                "method": "POST"
+            },
             "response": {
+                "status": {
+                    "code": 200,
+                    "message": "OK"
+                },
                 "headers": {
-                    "content-type": [
-                        "application/json"
-                    ], 
-                    "date": [
-                        "Thu, 10 Jan 2019 04:08:26 GMT"
-                    ], 
-                    "x-content-type-options": [
-                        "nosniff"
-                    ], 
+                    "x-xss-protection": [
+                        "1; mode=block"
+                    ],
                     "cache-control": [
                         "no-store"
-                    ], 
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ], 
-                    "x-xss-protection": [
-                        "1; mode=block"
-                    ], 
+                    ],
+                    "content-type": [
+                        "application/json"
+                    ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:22 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
                     "x-frame-options": [
                         "Deny"
-                    ], 
+                    ],
+                    "content-length": [
+                        "51"
+                    ],
+                    "x-content-type-options": [
+                        "nosniff"
+                    ],
                     "expires": [
                         "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ], 
-                    "set-cookie": [
-                        "phsid=A%2Fegvbvujn6hykhurzyjtaq4xduxl6sz7gavenbcou; expires=Tue, 09-Jan-2024 04:08:26 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ], 
-                    "transfer-encoding": [
-                        "chunked"
-                    ], 
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
                     ]
-                }, 
-                "status": {
-                    "message": "OK", 
-                    "code": 200
-                }, 
+                },
                 "body": {
                     "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
                 }
-            }, 
+            }
+        },
+        {
             "request": {
-                "method": "POST", 
-                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", 
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
                     "content-length": [
-                        "227"
-                    ], 
+                        "257"
+                    ],
+                    "content-type": [
+                        "application/x-www-form-urlencoded"
+                    ],
                     "accept": [
                         "application/mercurial-0.1"
-                    ], 
-                    "content-type": [
-                        "application/x-www-form-urlencoded"
-                    ], 
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
-                    ], 
+                    ],
                     "host": [
                         "phab.mercurial-scm.org"
                     ]
-                }, 
-                "body": "name=local%3Acommits&api.token=cli-hahayouwish&data=%7B%22540a21d3fbeb7c56cafe726bba6cd9fdcc94f29c%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&diff_id=13121"
-            }
-        }, 
-        {
+                },
+                "body": "diff_id=1902&data=%7B%2224ffd6bca53a1e05369ed5b8834587c2b2b364da%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%2224ffd6bca53a1e05369ed5b8834587c2b2b364da%22%2C+%22parents%22%3A+%5B%222837deb84f4ab1315c1197b8aef10c620465e352%22%5D%2C+%22time%22%3A+0%7D%7D&name=local%3Acommits&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+                "method": "POST"
+            },
             "response": {
+                "status": {
+                    "code": 200,
+                    "message": "OK"
+                },
                 "headers": {
+                    "x-xss-protection": [
+                        "1; mode=block"
+                    ],
+                    "cache-control": [
+                        "no-store"
+                    ],
                     "content-type": [
                         "application/json"
-                    ], 
+                    ],
                     "date": [
-                        "Thu, 10 Jan 2019 04:08:26 GMT"
-                    ], 
+                        "Sun, 05 May 2019 13:31:23 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "51"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
-                    ], 
-                    "cache-control": [
-                        "no-store"
-                    ], 
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ], 
-                    "x-xss-protection": [
-                        "1; mode=block"
-                    ], 
-                    "x-frame-options": [
-                        "Deny"
-                    ], 
+                    ],
                     "expires": [
                         "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ], 
-                    "set-cookie": [
-                        "phsid=A%2Flbjzqvie4g24kmhnqws2bwhmeiijd3qvvkd22isg; expires=Tue, 09-Jan-2024 04:08:27 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ], 
-                    "transfer-encoding": [
-                        "chunked"
-                    ], 
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
                     ]
-                }, 
-                "status": {
-                    "message": "OK", 
-                    "code": 200
-                }, 
+                },
                 "body": {
-                    "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create public change for phabricator testing\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"}},\"error_code\":null,\"error_info\":null}"
+                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
                 }
-            }, 
+            }
+        },
+        {
             "request": {
-                "method": "POST", 
-                "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage", 
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
                     "content-length": [
                         "94"
-                    ], 
+                    ],
+                    "content-type": [
+                        "application/x-www-form-urlencoded"
+                    ],
                     "accept": [
                         "application/mercurial-0.1"
-                    ], 
-                    "content-type": [
-                        "application/x-www-form-urlencoded"
-                    ], 
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
-                    ], 
+                    ],
                     "host": [
                         "phab.mercurial-scm.org"
                     ]
-                }, 
-                "body": "corpus=create+public+change+for+phabricator+testing&api.token=cli-hahayouwish"
-            }
-        }, 
-        {
+                },
+                "body": "api.token=cli-hahayouwish&corpus=create+public+change+for+phabricator+testing",
+                "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+                "method": "POST"
+            },
             "response": {
+                "status": {
+                    "code": 200,
+                    "message": "OK"
+                },
                 "headers": {
+                    "x-xss-protection": [
+                        "1; mode=block"
+                    ],
+                    "cache-control": [
+                        "no-store"
+                    ],
                     "content-type": [
                         "application/json"
-                    ], 
+                    ],
                     "date": [
-                        "Thu, 10 Jan 2019 04:08:27 GMT"
-                    ], 
+                        "Sun, 05 May 2019 13:31:23 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "306"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
-                    ], 
-                    "cache-control": [
-                        "no-store"
-                    ], 
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ], 
-                    "x-xss-protection": [
-                        "1; mode=block"
-                    ], 
-                    "x-frame-options": [
-                        "Deny"
-                    ], 
+                    ],
                     "expires": [
                         "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ], 
-                    "set-cookie": [
-                        "phsid=A%2Fkclyjmm2warvrxwksppx3qxupj4f72ejvxuavrn5; expires=Tue, 09-Jan-2024 04:08:27 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ], 
-                    "transfer-encoding": [
-                        "chunked"
-                    ], 
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
                     ]
-                }, 
-                "status": {
-                    "message": "OK", 
-                    "code": 200
-                }, 
+                },
                 "body": {
-                    "string": "{\"result\":{\"object\":{\"id\":5544,\"phid\":\"PHID-DREV-bwugldlyieuwzrk76xzy\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-wojlvnhodzdoqh6\"},{\"phid\":\"PHID-XACT-DREV-ju3bw7rltmmwpbf\"},{\"phid\":\"PHID-XACT-DREV-2hwwi7dagftdp6q\"},{\"phid\":\"PHID-XACT-DREV-zfsyu5o7wkqzh6s\"},{\"phid\":\"PHID-XACT-DREV-srrkwmheqn6gssk\"}]},\"error_code\":null,\"error_info\":null}"
+                    "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create public change for phabricator testing\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"create public change for phabricator testing\"}]},\"error_code\":null,\"error_info\":null}"
                 }
-            }, 
+            }
+        },
+        {
             "request": {
-                "method": "POST", 
-                "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit", 
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
                     "content-length": [
                         "253"
-                    ], 
+                    ],
+                    "content-type": [
+                        "application/x-www-form-urlencoded"
+                    ],
                     "accept": [
                         "application/mercurial-0.1"
-                    ], 
-                    "content-type": [
-                        "application/x-www-form-urlencoded"
-                    ], 
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
-                    ], 
+                    ],
                     "host": [
                         "phab.mercurial-scm.org"
                     ]
-                }, 
-                "body": "transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-xrku5f3mlveqr3hhj6a7&transactions%5B1%5D%5Btype%5D=title&transactions%5B1%5D%5Bvalue%5D=create+public+change+for+phabricator+testing&api.token=cli-hahayouwish"
-            }
-        }, 
-        {
+                },
+                "body": "api.token=cli-hahayouwish&transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-uuzq4s7s72y4ts7ijduc&transactions%5B1%5D%5Btype%5D=title&transactions%5B1%5D%5Bvalue%5D=create+public+change+for+phabricator+testing",
+                "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+                "method": "POST"
+            },
             "response": {
+                "status": {
+                    "code": 200,
+                    "message": "OK"
+                },
                 "headers": {
+                    "x-xss-protection": [
+                        "1; mode=block"
+                    ],
+                    "cache-control": [
+                        "no-store"
+                    ],
                     "content-type": [
                         "application/json"
-                    ], 
+                    ],
                     "date": [
-                        "Thu, 10 Jan 2019 04:08:28 GMT"
-                    ], 
+                        "Sun, 05 May 2019 13:31:24 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "294"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
-                    ], 
-                    "cache-control": [
-                        "no-store"
-                    ], 
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ], 
-                    "x-xss-protection": [
-                        "1; mode=block"
-                    ], 
-                    "x-frame-options": [
-                        "Deny"
-                    ], 
+                    ],
                     "expires": [
                         "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ], 
-                    "set-cookie": [
-                        "phsid=A%2Fbw4ordbzl7d4hcgyyxnoawhrfhycrvvkk6arnz5p; expires=Tue, 09-Jan-2024 04:08:28 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ], 
-                    "transfer-encoding": [
-                        "chunked"
-                    ], 
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
                     ]
-                }, 
-                "status": {
-                    "message": "OK", 
-                    "code": 200
-                }, 
+                },
                 "body": {
-                    "string": "{\"result\":{\"id\":13122,\"phid\":\"PHID-DIFF-iksauhhfhmxfjijyqxji\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/13122\\/\"},\"error_code\":null,\"error_info\":null}"
+                    "string": "{\"result\":{\"object\":{\"id\":1192,\"phid\":\"PHID-DREV-qb4xy3abx7eu4puizvjl\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-n2zlzs5qmdlvfbx\"},{\"phid\":\"PHID-XACT-DREV-dwojtdj2d3geffe\"},{\"phid\":\"PHID-XACT-DREV-gr4vgeynol22tgf\"},{\"phid\":\"PHID-XACT-DREV-aighrcyai72tgzv\"}]},\"error_code\":null,\"error_info\":null}"
                 }
-            }, 
+            }
+        },
+        {
             "request": {
-                "method": "POST", 
-                "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff", 
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
                     "content-length": [
                         "232"
-                    ], 
+                    ],
+                    "content-type": [
+                        "application/x-www-form-urlencoded"
+                    ],
                     "accept": [
                         "application/mercurial-0.1"
-                    ], 
-                    "content-type": [
-                        "application/x-www-form-urlencoded"
-                    ], 
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
-                    ], 
+                    ],
                     "host": [
                         "phab.mercurial-scm.org"
                     ]
-                }, 
-                "body": "repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3&diff=diff+--git+a%2Falpha+b%2Falpha%0A---+a%2Falpha%0A%2B%2B%2B+b%2Falpha%0A%40%40+-1%2C2+%2B1%2C1+%40%40%0A-alpha%0A-more%0A%2Bdraft+change%0A&api.token=cli-hahayouwish"
-            }
-        }, 
-        {
+                },
+                "body": "api.token=cli-hahayouwish&diff=diff+--git+a%2Falpha+b%2Falpha%0A---+a%2Falpha%0A%2B%2B%2B+b%2Falpha%0A%40%40+-1%2C2+%2B1%2C1+%40%40%0A-alpha%0A-more%0A%2Bdraft+change%0A&repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3",
+                "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
+                "method": "POST"
+            },
             "response": {
+                "status": {
+                    "code": 200,
+                    "message": "OK"
+                },
                 "headers": {
+                    "x-xss-protection": [
+                        "1; mode=block"
+                    ],
+                    "cache-control": [
+                        "no-store"
+                    ],
                     "content-type": [
                         "application/json"
-                    ], 
+                    ],
                     "date": [
-                        "Thu, 10 Jan 2019 04:08:29 GMT"
-                    ], 
+                        "Sun, 05 May 2019 13:31:25 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "172"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
-                    ], 
-                    "cache-control": [
-                        "no-store"
-                    ], 
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ], 
-                    "x-xss-protection": [
-                        "1; mode=block"
-                    ], 
-                    "x-frame-options": [
-                        "Deny"
-                    ], 
+                    ],
                     "expires": [
                         "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ], 
-                    "set-cookie": [
-                        "phsid=A%2Fgt3wmrrlkmpdhyaj5rsesxcwbabhpjlhoa6matcg; expires=Tue, 09-Jan-2024 04:08:29 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ], 
-                    "transfer-encoding": [
-                        "chunked"
-                    ], 
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
                     ]
-                }, 
-                "status": {
-                    "message": "OK", 
-                    "code": 200
-                }, 
+                },
                 "body": {
-                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+                    "string": "{\"result\":{\"id\":1903,\"phid\":\"PHID-DIFF-4pugk2zedyh2xm27uuvh\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/1903\\/\"},\"error_code\":null,\"error_info\":null}"
                 }
-            }, 
+            }
+        },
+        {
             "request": {
-                "method": "POST", 
-                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", 
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
                     "content-length": [
-                        "264"
-                    ], 
+                        "296"
+                    ],
+                    "content-type": [
+                        "application/x-www-form-urlencoded"
+                    ],
                     "accept": [
                         "application/mercurial-0.1"
-                    ], 
-                    "content-type": [
-                        "application/x-www-form-urlencoded"
-                    ], 
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
-                    ], 
+                    ],
                     "host": [
                         "phab.mercurial-scm.org"
                     ]
-                }, 
-                "body": "name=hg%3Ameta&api.token=cli-hahayouwish&data=%7B%22date%22%3A+%220+0%22%2C+%22user%22%3A+%22test%22%2C+%22node%22%3A+%226bca752686cd24e603094ef55574655c0017723a%22%2C+%22parent%22%3A+%22540a21d3fbeb7c56cafe726bba6cd9fdcc94f29c%22%7D&diff_id=13122"
-            }
-        }, 
-        {
+                },
+                "body": "diff_id=1903&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%22ac331633be793e0d4159d5525b404a9782f54904%22%2C+%22parent%22%3A+%2224ffd6bca53a1e05369ed5b8834587c2b2b364da%22%2C+%22user%22%3A+%22test%22%7D&name=hg%3Ameta&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+                "method": "POST"
+            },
             "response": {
+                "status": {
+                    "code": 200,
+                    "message": "OK"
+                },
                 "headers": {
-                    "content-type": [
-                        "application/json"
-                    ], 
-                    "date": [
-                        "Thu, 10 Jan 2019 04:08:29 GMT"
-                    ], 
-                    "x-content-type-options": [
-                        "nosniff"
-                    ], 
+                    "x-xss-protection": [
+                        "1; mode=block"
+                    ],
                     "cache-control": [
                         "no-store"
-                    ], 
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ], 
-                    "x-xss-protection": [
-                        "1; mode=block"
-                    ], 
+                    ],
+                    "content-type": [
+                        "application/json"
+                    ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:26 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
                     "x-frame-options": [
                         "Deny"
-                    ], 
+                    ],
+                    "content-length": [
+                        "51"
+                    ],
+                    "x-content-type-options": [
+                        "nosniff"
+                    ],
                     "expires": [
                         "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ], 
-                    "set-cookie": [
-                        "phsid=A%2Fntcsqzh6pptdkfnebvmck6l3y3rrwxzotvsq4phl; expires=Tue, 09-Jan-2024 04:08:29 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ], 
-                    "transfer-encoding": [
-                        "chunked"
-                    ], 
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
                     ]
-                }, 
-                "status": {
-                    "message": "OK", 
-                    "code": 200
-                }, 
+                },
                 "body": {
                     "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
                 }
-            }, 
+            }
+        },
+        {
             "request": {
-                "method": "POST", 
-                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", 
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
                     "content-length": [
-                        "227"
-                    ], 
+                        "257"
+                    ],
+                    "content-type": [
+                        "application/x-www-form-urlencoded"
+                    ],
                     "accept": [
                         "application/mercurial-0.1"
-                    ], 
-                    "content-type": [
-                        "application/x-www-form-urlencoded"
-                    ], 
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
-                    ], 
+                    ],
                     "host": [
                         "phab.mercurial-scm.org"
                     ]
-                }, 
-                "body": "name=local%3Acommits&api.token=cli-hahayouwish&data=%7B%226bca752686cd24e603094ef55574655c0017723a%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&diff_id=13122"
-            }
-        }, 
-        {
+                },
+                "body": "diff_id=1903&data=%7B%22ac331633be793e0d4159d5525b404a9782f54904%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%22ac331633be793e0d4159d5525b404a9782f54904%22%2C+%22parents%22%3A+%5B%2224ffd6bca53a1e05369ed5b8834587c2b2b364da%22%5D%2C+%22time%22%3A+0%7D%7D&name=local%3Acommits&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+                "method": "POST"
+            },
             "response": {
+                "status": {
+                    "code": 200,
+                    "message": "OK"
+                },
                 "headers": {
+                    "x-xss-protection": [
+                        "1; mode=block"
+                    ],
+                    "cache-control": [
+                        "no-store"
+                    ],
                     "content-type": [
                         "application/json"
-                    ], 
+                    ],
                     "date": [
-                        "Thu, 10 Jan 2019 04:08:30 GMT"
-                    ], 
+                        "Sun, 05 May 2019 13:31:27 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "51"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
-                    ], 
-                    "cache-control": [
-                        "no-store"
-                    ], 
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ], 
-                    "x-xss-protection": [
-                        "1; mode=block"
-                    ], 
-                    "x-frame-options": [
-                        "Deny"
-                    ], 
+                    ],
                     "expires": [
                         "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ], 
-                    "set-cookie": [
-                        "phsid=A%2Fgturi5p5fz64q26mztdrzjldzynp62pp7opcxsnm; expires=Tue, 09-Jan-2024 04:08:30 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ], 
-                    "transfer-encoding": [
-                        "chunked"
-                    ], 
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
                     ]
-                }, 
-                "status": {
-                    "message": "OK", 
-                    "code": 200
-                }, 
+                },
                 "body": {
-                    "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create draft change for phabricator testing\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"}},\"error_code\":null,\"error_info\":null}"
+                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
                 }
-            }, 
+            }
+        },
+        {
             "request": {
-                "method": "POST", 
-                "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage", 
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
                     "content-length": [
                         "93"
-                    ], 
+                    ],
+                    "content-type": [
+                        "application/x-www-form-urlencoded"
+                    ],
                     "accept": [
                         "application/mercurial-0.1"
-                    ], 
-                    "content-type": [
-                        "application/x-www-form-urlencoded"
-                    ], 
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
-                    ], 
+                    ],
                     "host": [
                         "phab.mercurial-scm.org"
                     ]
-                }, 
-                "body": "corpus=create+draft+change+for+phabricator+testing&api.token=cli-hahayouwish"
-            }
-        }, 
-        {
+                },
+                "body": "api.token=cli-hahayouwish&corpus=create+draft+change+for+phabricator+testing",
+                "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+                "method": "POST"
+            },
             "response": {
+                "status": {
+                    "code": 200,
+                    "message": "OK"
+                },
                 "headers": {
+                    "x-xss-protection": [
+                        "1; mode=block"
+                    ],
+                    "cache-control": [
+                        "no-store"
+                    ],
                     "content-type": [
                         "application/json"
-                    ], 
+                    ],
                     "date": [
-                        "Thu, 10 Jan 2019 04:08:31 GMT"
-                    ], 
+                        "Sun, 05 May 2019 13:31:27 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "304"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
-                    ], 
-                    "cache-control": [
-                        "no-store"
-                    ], 
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ], 
-                    "x-xss-protection": [
-                        "1; mode=block"
-                    ], 
-                    "x-frame-options": [
-                        "Deny"
-                    ], 
+                    ],
                     "expires": [
                         "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ], 
-                    "set-cookie": [
-                        "phsid=A%2F4vyvyabatbn7y5bhav6nthgdt4mm6oeh6ybvnrl5; expires=Tue, 09-Jan-2024 04:08:31 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ], 
-                    "transfer-encoding": [
-                        "chunked"
-                    ], 
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
                     ]
-                }, 
-                "status": {
-                    "message": "OK", 
-                    "code": 200
-                }, 
+                },
                 "body": {
-                    "string": "{\"result\":{\"object\":{\"id\":5545,\"phid\":\"PHID-DREV-ga6i6vbmatvd2fszrr2o\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-epqu5uekkf4ig67\"},{\"phid\":\"PHID-XACT-DREV-y3t5z573bwbqv7e\"},{\"phid\":\"PHID-XACT-DREV-dmjvlq7wngqgwxv\"},{\"phid\":\"PHID-XACT-DREV-rkm576j6wvji3ye\"},{\"phid\":\"PHID-XACT-DREV-mb7ttr44lno6j2w\"},{\"phid\":\"PHID-XACT-DREV-ma747d2dkzk3eun\"},{\"phid\":\"PHID-XACT-DREV-3u7lqg7mwxrix5w\"},{\"phid\":\"PHID-XACT-DREV-r33n73dqn7doz7b\"}]},\"error_code\":null,\"error_info\":null}"
+                    "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create draft change for phabricator testing\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"create draft change for phabricator testing\"}]},\"error_code\":null,\"error_info\":null}"
                 }
-            }, 
+            }
+        },
+        {
             "request": {
-                "method": "POST", 
-                "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit", 
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
                     "content-length": [
                         "409"
-                    ], 
+                    ],
+                    "content-type": [
+                        "application/x-www-form-urlencoded"
+                    ],
                     "accept": [
                         "application/mercurial-0.1"
-                    ], 
-                    "content-type": [
-                        "application/x-www-form-urlencoded"
-                    ], 
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
-                    ], 
+                    ],
                     "host": [
                         "phab.mercurial-scm.org"
                     ]
-                }, 
-                "body": "transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-iksauhhfhmxfjijyqxji&transactions%5B1%5D%5Btype%5D=summary&transactions%5B1%5D%5Bvalue%5D=Depends+on+D5544&transactions%5B2%5D%5Btype%5D=summary&transactions%5B2%5D%5Bvalue%5D=+&transactions%5B3%5D%5Btype%5D=title&transactions%5B3%5D%5Bvalue%5D=create+draft+change+for+phabricator+testing&api.token=cli-hahayouwish"
-            }
-        }, 
-        {
+                },
+                "body": "api.token=cli-hahayouwish&transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-4pugk2zedyh2xm27uuvh&transactions%5B1%5D%5Btype%5D=summary&transactions%5B1%5D%5Bvalue%5D=Depends+on+D1192&transactions%5B2%5D%5Btype%5D=summary&transactions%5B2%5D%5Bvalue%5D=+&transactions%5B3%5D%5Btype%5D=title&transactions%5B3%5D%5Bvalue%5D=create+draft+change+for+phabricator+testing",
+                "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+                "method": "POST"
+            },
             "response": {
+                "status": {
+                    "code": 200,
+                    "message": "OK"
+                },
                 "headers": {
+                    "x-xss-protection": [
+                        "1; mode=block"
+                    ],
+                    "cache-control": [
+                        "no-store"
+                    ],
                     "content-type": [
                         "application/json"
-                    ], 
+                    ],
                     "date": [
-                        "Thu, 10 Jan 2019 04:08:32 GMT"
-                    ], 
+                        "Sun, 05 May 2019 13:31:29 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "420"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
-                    ], 
-                    "cache-control": [
-                        "no-store"
-                    ], 
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ], 
-                    "x-xss-protection": [
-                        "1; mode=block"
-                    ], 
-                    "x-frame-options": [
-                        "Deny"
-                    ], 
+                    ],
                     "expires": [
                         "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ], 
-                    "set-cookie": [
-                        "phsid=A%2Fvd66cz7uxztfwfapgqrlmfmoj7szo5wvwk7vqc2u; expires=Tue, 09-Jan-2024 04:08:32 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ], 
-                    "transfer-encoding": [
-                        "chunked"
-                    ], 
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
                     ]
-                }, 
-                "status": {
-                    "message": "OK", 
-                    "code": 200
-                }, 
+                },
                 "body": {
-                    "string": "{\"result\":[{\"id\":\"5545\",\"phid\":\"PHID-DREV-ga6i6vbmatvd2fszrr2o\",\"title\":\"create draft change for phabricator testing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D5545\",\"dateCreated\":\"1547093311\",\"dateModified\":\"1547093311\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":[],\"branch\":null,\"summary\":\" \",\"testPlan\":\"\",\"lineCount\":\"3\",\"activeDiffPHID\":\"PHID-DIFF-iksauhhfhmxfjijyqxji\",\"diffs\":[\"13122\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-bwugldlyieuwzrk76xzy\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null},{\"id\":\"5544\",\"phid\":\"PHID-DREV-bwugldlyieuwzrk76xzy\",\"title\":\"create public change for phabricator testing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D5544\",\"dateCreated\":\"1547093307\",\"dateModified\":\"1547093311\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":[],\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-xrku5f3mlveqr3hhj6a7\",\"diffs\":[\"13121\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}"
+                    "string": "{\"result\":{\"object\":{\"id\":1193,\"phid\":\"PHID-DREV-shdibf6gnumia7pou4wo\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-5lh4bjyat7sopph\"},{\"phid\":\"PHID-XACT-DREV-ihh5mnfq4lfd7z6\"},{\"phid\":\"PHID-XACT-DREV-jqgmk2a3klvofsk\"},{\"phid\":\"PHID-XACT-DREV-w5t5g4ke6kjynf3\"},{\"phid\":\"PHID-XACT-DREV-ro7ijohdoyaes55\"},{\"phid\":\"PHID-XACT-DREV-4g3uhii5akj24he\"},{\"phid\":\"PHID-XACT-DREV-44imsawbkha5nqw\"}]},\"error_code\":null,\"error_info\":null}"
                 }
-            }, 
+            }
+        },
+        {
             "request": {
-                "method": "POST", 
-                "uri": "https://phab.mercurial-scm.org//api/differential.query", 
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
                     "content-length": [
                         "74"
-                    ], 
+                    ],
+                    "content-type": [
+                        "application/x-www-form-urlencoded"
+                    ],
                     "accept": [
                         "application/mercurial-0.1"
-                    ], 
-                    "content-type": [
-                        "application/x-www-form-urlencoded"
-                    ], 
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
-                    ], 
+                    ],
                     "host": [
                         "phab.mercurial-scm.org"
                     ]
-                }, 
-                "body": "ids%5B0%5D=5544&ids%5B1%5D=5545&api.token=cli-hahayouwish"
-            }
-        }, 
-        {
+                },
+                "body": "ids%5B0%5D=1192&ids%5B1%5D=1193&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.query",
+                "method": "POST"
+            },
             "response": {
+                "status": {
+                    "code": 200,
+                    "message": "OK"
+                },
                 "headers": {
+                    "x-xss-protection": [
+                        "1; mode=block"
+                    ],
+                    "cache-control": [
+                        "no-store"
+                    ],
                     "content-type": [
                         "application/json"
-                    ], 
+                    ],
                     "date": [
-                        "Thu, 10 Jan 2019 04:08:32 GMT"
-                    ], 
+                        "Sun, 05 May 2019 13:31:29 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "1522"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
-                    ], 
-                    "cache-control": [
-                        "no-store"
-                    ], 
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ], 
-                    "x-xss-protection": [
-                        "1; mode=block"
-                    ], 
-                    "x-frame-options": [
-                        "Deny"
-                    ], 
+                    ],
                     "expires": [
                         "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ], 
-                    "set-cookie": [
-                        "phsid=A%2Fbqbv2blmnjqe3a5qkpewf5wghxqwcuewjbgfrtq7; expires=Tue, 09-Jan-2024 04:08:32 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ], 
-                    "transfer-encoding": [
-                        "chunked"
-                    ], 
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
                     ]
-                }, 
-                "status": {
-                    "message": "OK", 
-                    "code": 200
-                }, 
+                },
                 "body": {
-                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+                    "string": "{\"result\":[{\"id\":\"1193\",\"phid\":\"PHID-DREV-shdibf6gnumia7pou4wo\",\"title\":\"create draft change for phabricator testing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D1193\",\"dateCreated\":\"1557063088\",\"dateModified\":\"1557063088\",\"authorPHID\":\"PHID-USER-qmzis76vb2yh3ogldu6r\",\"status\":\"0\",\"statusName\":\"Draft\",\"properties\":{\"draft.broadcast\":false,\"lines.added\":1,\"lines.removed\":2},\"branch\":null,\"summary\":\" \",\"testPlan\":\"\",\"lineCount\":\"3\",\"activeDiffPHID\":\"PHID-DIFF-4pugk2zedyh2xm27uuvh\",\"diffs\":[\"1903\"],\"commits\":[],\"reviewers\":[],\"ccs\":[],\"hashes\":[],\"auxiliary\":{\"bugzilla.bug-id\":null,\"phabricator:projects\":[\"PHID-PROJ-f2a3wl5wxtqdtfgdjqzk\"],\"phabricator:depends-on\":[\"PHID-DREV-qb4xy3abx7eu4puizvjl\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null},{\"id\":\"1192\",\"phid\":\"PHID-DREV-qb4xy3abx7eu4puizvjl\",\"title\":\"create public change for phabricator testing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D1192\",\"dateCreated\":\"1557063084\",\"dateModified\":\"1557063088\",\"authorPHID\":\"PHID-USER-qmzis76vb2yh3ogldu6r\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-uuzq4s7s72y4ts7ijduc\",\"diffs\":[\"1902\"],\"commits\":[],\"reviewers\":[],\"ccs\":[],\"hashes\":[],\"auxiliary\":{\"bugzilla.bug-id\":null,\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}"
                 }
-            }, 
+            }
+        },
+        {
             "request": {
-                "method": "POST", 
-                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", 
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
                     "content-length": [
-                        "264"
-                    ], 
+                        "296"
+                    ],
+                    "content-type": [
+                        "application/x-www-form-urlencoded"
+                    ],
                     "accept": [
                         "application/mercurial-0.1"
-                    ], 
-                    "content-type": [
-                        "application/x-www-form-urlencoded"
-                    ], 
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
-                    ], 
+                    ],
                     "host": [
                         "phab.mercurial-scm.org"
                     ]
-                }, 
-                "body": "name=hg%3Ameta&api.token=cli-hahayouwish&data=%7B%22date%22%3A+%220+0%22%2C+%22user%22%3A+%22test%22%2C+%22node%22%3A+%22620a50fd6ed958bbee178052de67acc31dcac66e%22%2C+%22parent%22%3A+%22540a21d3fbeb7c56cafe726bba6cd9fdcc94f29c%22%7D&diff_id=13122"
-            }
-        }, 
-        {
+                },
+                "body": "diff_id=1903&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%22a19f1434f9a578325eb9799c9961b5465d4e6e40%22%2C+%22parent%22%3A+%2224ffd6bca53a1e05369ed5b8834587c2b2b364da%22%2C+%22user%22%3A+%22test%22%7D&name=hg%3Ameta&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+                "method": "POST"
+            },
             "response": {
+                "status": {
+                    "code": 200,
+                    "message": "OK"
+                },
                 "headers": {
-                    "content-type": [
-                        "application/json"
-                    ], 
-                    "date": [
-                        "Thu, 10 Jan 2019 04:08:33 GMT"
-                    ], 
-                    "x-content-type-options": [
-                        "nosniff"
-                    ], 
+                    "x-xss-protection": [
+                        "1; mode=block"
+                    ],
                     "cache-control": [
                         "no-store"
-                    ], 
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ], 
-                    "x-xss-protection": [
-                        "1; mode=block"
-                    ], 
+                    ],
+                    "content-type": [
+                        "application/json"
+                    ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:30 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
                     "x-frame-options": [
                         "Deny"
-                    ], 
+                    ],
+                    "content-length": [
+                        "51"
+                    ],
+                    "x-content-type-options": [
+                        "nosniff"
+                    ],
                     "expires": [
                         "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ], 
-                    "set-cookie": [
-                        "phsid=A%2Fic7sfd33zs7c44ojloujnoicm3roxnre45glurgz; expires=Tue, 09-Jan-2024 04:08:33 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ], 
-                    "transfer-encoding": [
-                        "chunked"
-                    ], 
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
                     ]
-                }, 
-                "status": {
-                    "message": "OK", 
-                    "code": 200
-                }, 
+                },
                 "body": {
                     "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
                 }
-            }, 
+            }
+        },
+        {
             "request": {
-                "method": "POST", 
-                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", 
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
                     "content-length": [
-                        "227"
-                    ], 
+                        "257"
+                    ],
+                    "content-type": [
+                        "application/x-www-form-urlencoded"
+                    ],
                     "accept": [
                         "application/mercurial-0.1"
-                    ], 
-                    "content-type": [
-                        "application/x-www-form-urlencoded"
-                    ], 
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
-                    ], 
+                    ],
                     "host": [
                         "phab.mercurial-scm.org"
                     ]
-                }, 
-                "body": "name=local%3Acommits&api.token=cli-hahayouwish&data=%7B%22620a50fd6ed958bbee178052de67acc31dcac66e%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&diff_id=13122"
+                },
+                "body": "diff_id=1903&data=%7B%22a19f1434f9a578325eb9799c9961b5465d4e6e40%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%22a19f1434f9a578325eb9799c9961b5465d4e6e40%22%2C+%22parents%22%3A+%5B%2224ffd6bca53a1e05369ed5b8834587c2b2b364da%22%5D%2C+%22time%22%3A+0%7D%7D&name=local%3Acommits&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+                "method": "POST"
+            },
+            "response": {
+                "status": {
+                    "code": 200,
+                    "message": "OK"
+                },
+                "headers": {
+                    "x-xss-protection": [
+                        "1; mode=block"
+                    ],
+                    "cache-control": [
+                        "no-store"
+                    ],
+                    "content-type": [
+                        "application/json"
+                    ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:31 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "51"
+                    ],
+                    "x-content-type-options": [
+                        "nosniff"
+                    ],
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
+                    ]
+                },
+                "body": {
+                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+                }
             }
         }
-    ], 
-    "version": 1
-}
\ No newline at end of file
+    ]
+}
--- a/tests/phabricator/phabsend-update-alpha-create-beta.json	Mon May 20 10:08:28 2019 +0200
+++ b/tests/phabricator/phabsend-update-alpha-create-beta.json	Mon May 20 11:40:47 2019 -0400
@@ -1,1025 +1,1025 @@
 {
+    "version": 1,
     "interactions": [
         {
             "request": {
-                "method": "POST",
-                "body": "api.token=cli-hahayouwish&revisionIDs%5B0%5D=6054",
-                "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "66"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "66"
                     ]
-                }
+                },
+                "body": "api.token=cli-hahayouwish&revisionIDs%5B0%5D=1190",
+                "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":{\"14303\":{\"id\":\"14303\",\"revisionID\":\"6054\",\"dateCreated\":\"1551571944\",\"dateModified\":\"1551571947\",\"sourceControlBaseRevision\":null,\"sourceControlPath\":null,\"sourceControlSystem\":null,\"branch\":null,\"bookmark\":null,\"creationMethod\":\"web\",\"description\":null,\"unitStatus\":\"4\",\"lintStatus\":\"4\",\"changes\":[{\"id\":\"32287\",\"metadata\":{\"line:first\":1},\"oldPath\":null,\"currentPath\":\"alpha\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+alpha\\n\"}]}],\"properties\":{\"hg:meta\":{\"user\":\"test\",\"parent\":\"0000000000000000000000000000000000000000\",\"node\":\"cb03845d6dd98c72bec766c7ed08c693cc49817a\",\"date\":\"0 0\"},\"local:commits\":{\"cb03845d6dd98c72bec766c7ed08c693cc49817a\":{\"author\":\"test\",\"authorEmail\":\"test\",\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:30 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:08 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "1132"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2Fnf3xdxgvvgky277foc7s2p6xrgtsvn4bzmayrbmb; expires=Fri, 01-Mar-2024 00:12:30 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":{\"1899\":{\"id\":\"1899\",\"revisionID\":\"1190\",\"dateCreated\":\"1557063061\",\"dateModified\":\"1557063064\",\"sourceControlBaseRevision\":null,\"sourceControlPath\":null,\"sourceControlSystem\":null,\"branch\":null,\"bookmark\":null,\"creationMethod\":\"web\",\"description\":null,\"unitStatus\":\"4\",\"lintStatus\":\"4\",\"changes\":[{\"id\":\"4355\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"g6dr_XSxA9EP\"},\"oldPath\":null,\"currentPath\":\"alpha\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+alpha\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 0\",\"node\":\"53fe3a1e0f42670a88ad845247b2ed4d5e645434\",\"parent\":\"0000000000000000000000000000000000000000\",\"user\":\"test\"},\"local:commits\":{\"53fe3a1e0f42670a88ad845247b2ed4d5e645434\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "constraints%5Bcallsigns%5D%5B0%5D=HG&api.token=cli-hahayouwish",
-                "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "93"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "79"
                     ]
-                }
+                },
+                "body": "api.token=cli-hahayouwish&constraints%5Bcallsigns%5D%5B0%5D=HG",
+                "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:31 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:09 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "549"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2Fmlq7cl6pakmia2uecfcevwhdl3hyqe6rdb2y7usm; expires=Fri, 01-Mar-2024 00:12:31 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":{\"data\":[{\"id\":10,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":\"Mercurial\",\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"spacePHID\":null,\"dateCreated\":1507817156,\"dateModified\":1529613276,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3&api.token=cli-hahayouwish&diff=diff+--git+a%2Falpha+b%2Falpha%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Falpha%0A%40%40+-0%2C0+%2B1%2C2+%40%40%0A%2Balpha%0A%2Bmore%0A",
-                "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "245"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "245"
                     ]
-                }
+                },
+                "body": "api.token=cli-hahayouwish&diff=diff+--git+a%2Falpha+b%2Falpha%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Falpha%0A%40%40+-0%2C0+%2B1%2C2+%40%40%0A%2Balpha%0A%2Bmore%0A&repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3",
+                "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":{\"id\":14304,\"phid\":\"PHID-DIFF-3wv2fwmzp27uamb66xxg\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/14304\\/\"},\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:32 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:09 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "172"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2Fptjtujvqlcwhzs4yhneogb323aqessc5axlu4rif; expires=Fri, 01-Mar-2024 00:12:32 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":{\"id\":1900,\"phid\":\"PHID-DIFF-gra4b3ivsgebktbeoxxx\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/1900\\/\"},\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "diff_id=14304&data=%7B%22user%22%3A+%22test%22%2C+%22parent%22%3A+%220000000000000000000000000000000000000000%22%2C+%22node%22%3A+%22939d862f03181a366fea64a540baf0bb33f85d92%22%2C+%22date%22%3A+%220+0%22%7D&api.token=cli-hahayouwish&name=hg%3Ameta",
-                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "296"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "264"
                     ]
-                }
+                },
+                "body": "diff_id=1900&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%22d940d39fb603f29ea5df4b7c15f315fe6ff4e346%22%2C+%22parent%22%3A+%220000000000000000000000000000000000000000%22%2C+%22user%22%3A+%22test%22%7D&name=hg%3Ameta&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:32 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:10 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "51"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2Feho2462w6mulsjeoz3e4rwgf37aekqwgpqmarn2f; expires=Fri, 01-Mar-2024 00:12:32 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "diff_id=14304&data=%7B%22939d862f03181a366fea64a540baf0bb33f85d92%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&api.token=cli-hahayouwish&name=local%3Acommits",
-                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "257"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "227"
                     ]
-                }
+                },
+                "body": "diff_id=1900&data=%7B%22d940d39fb603f29ea5df4b7c15f315fe6ff4e346%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%22d940d39fb603f29ea5df4b7c15f315fe6ff4e346%22%2C+%22parents%22%3A+%5B%220000000000000000000000000000000000000000%22%5D%2C+%22time%22%3A+0%7D%7D&name=local%3Acommits&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:33 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:11 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "51"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2F4ca3h5qhtwgn55t3zznczixyt2st4tm44t23aceg; expires=Fri, 01-Mar-2024 00:12:33 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "api.token=cli-hahayouwish&corpus=create+alpha+for+phabricator+test+%E2%82%AC%0A%0ADifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD6054",
-                "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "173"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "168"
                     ]
-                }
+                },
+                "body": "api.token=cli-hahayouwish&corpus=create+alpha+for+phabricator+test+%E2%82%AC%0A%0ADifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD1190",
+                "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create alpha for phabricator test \\u20ac\",\"revisionID\":6054},\"revisionIDFieldInfo\":{\"value\":6054,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"}},\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:34 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:11 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "316"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2F7pvtbpw2waiblbsbydew3vfpulqnccf4647ymipq; expires=Fri, 01-Mar-2024 00:12:34 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create alpha for phabricator test \\u20ac\",\"revisionID\":1190},\"revisionIDFieldInfo\":{\"value\":1190,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"create alpha for phabricator test \\u20ac\"}]},\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "api.token=cli-hahayouwish&transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-3wv2fwmzp27uamb66xxg&transactions%5B1%5D%5Btype%5D=title&transactions%5B1%5D%5Bvalue%5D=create+alpha+for+phabricator+test+%E2%82%AC&objectIdentifier=6054",
-                "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "274"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "274"
                     ]
-                }
+                },
+                "body": "api.token=cli-hahayouwish&objectIdentifier=1190&transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-gra4b3ivsgebktbeoxxx&transactions%5B1%5D%5Btype%5D=title&transactions%5B1%5D%5Bvalue%5D=create+alpha+for+phabricator+test+%E2%82%AC",
+                "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":{\"object\":{\"id\":\"6054\",\"phid\":\"PHID-DREV-6pczsbtdpqjc2nskmxwy\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-mc2gfyoyhkfz7dy\"}]},\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:34 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:12 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "168"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2Fhmyuw3lg6h4joaswqnfcmnzdkp6p2qxotsvahb7l; expires=Fri, 01-Mar-2024 00:12:34 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":{\"object\":{\"id\":1190,\"phid\":\"PHID-DREV-kikesmfxhzpfaxbzgj3l\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-tk6ciodgzlwo2v6\"}]},\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3&api.token=cli-hahayouwish&diff=diff+--git+a%2Fbeta+b%2Fbeta%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Fbeta%0A%40%40+-0%2C0+%2B1%2C1+%40%40%0A%2Bbeta%0A",
-                "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "231"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "231"
                     ]
-                }
+                },
+                "body": "api.token=cli-hahayouwish&diff=diff+--git+a%2Fbeta+b%2Fbeta%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Fbeta%0A%40%40+-0%2C0+%2B1%2C1+%40%40%0A%2Bbeta%0A&repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3",
+                "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":{\"id\":14305,\"phid\":\"PHID-DIFF-pofynzhmmqm2czm33teg\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/14305\\/\"},\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:35 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:13 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "172"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2F2xpzt6bryn7n3gug3ll7iu2gfqyy4zss5d7nolew; expires=Fri, 01-Mar-2024 00:12:35 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":{\"id\":1901,\"phid\":\"PHID-DIFF-uhbyhoejzbniwwzj2q5c\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/1901\\/\"},\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "diff_id=14305&data=%7B%22user%22%3A+%22test%22%2C+%22parent%22%3A+%22939d862f03181a366fea64a540baf0bb33f85d92%22%2C+%22node%22%3A+%22f55f947ed0f8ad80a04b7e87a0bf9febda2070b1%22%2C+%22date%22%3A+%220+0%22%7D&api.token=cli-hahayouwish&name=hg%3Ameta",
-                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "296"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "264"
                     ]
-                }
+                },
+                "body": "diff_id=1901&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%224b2486dfc8c7b238e70f8b022f9e09a0ea220415%22%2C+%22parent%22%3A+%22d940d39fb603f29ea5df4b7c15f315fe6ff4e346%22%2C+%22user%22%3A+%22test%22%7D&name=hg%3Ameta&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:36 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:14 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "51"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2Fygzbpe74xh6shrejkd3tj32t4gaqnvumy63iudrd; expires=Fri, 01-Mar-2024 00:12:36 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "diff_id=14305&data=%7B%22f55f947ed0f8ad80a04b7e87a0bf9febda2070b1%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&api.token=cli-hahayouwish&name=local%3Acommits",
-                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "257"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "227"
                     ]
-                }
+                },
+                "body": "diff_id=1901&data=%7B%224b2486dfc8c7b238e70f8b022f9e09a0ea220415%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%224b2486dfc8c7b238e70f8b022f9e09a0ea220415%22%2C+%22parents%22%3A+%5B%22d940d39fb603f29ea5df4b7c15f315fe6ff4e346%22%5D%2C+%22time%22%3A+0%7D%7D&name=local%3Acommits&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:37 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:15 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "51"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2Fgw67yfcsx7vvxkymeac52ca5is4jkxjwqqkhayco; expires=Fri, 01-Mar-2024 00:12:37 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "api.token=cli-hahayouwish&corpus=create+beta+for+phabricator+test",
-                "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "82"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "82"
                     ]
-                }
+                },
+                "body": "api.token=cli-hahayouwish&corpus=create+beta+for+phabricator+test",
+                "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create beta for phabricator test\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"}},\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:37 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:15 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "282"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2Fyt5ejs6pgvjdxzms7geaxup63jpqkisngu3cprk6; expires=Fri, 01-Mar-2024 00:12:37 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create beta for phabricator test\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"create beta for phabricator test\"}]},\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-pofynzhmmqm2czm33teg&transactions%5B1%5D%5Btype%5D=summary&transactions%5B1%5D%5Bvalue%5D=Depends+on+D6054&transactions%5B2%5D%5Btype%5D=summary&transactions%5B2%5D%5Bvalue%5D=+&transactions%5B3%5D%5Btype%5D=title&transactions%5B3%5D%5Bvalue%5D=create+beta+for+phabricator+test&api.token=cli-hahayouwish",
-                "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "398"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "398"
                     ]
-                }
+                },
+                "body": "api.token=cli-hahayouwish&transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-uhbyhoejzbniwwzj2q5c&transactions%5B1%5D%5Btype%5D=summary&transactions%5B1%5D%5Bvalue%5D=Depends+on+D1190&transactions%5B2%5D%5Btype%5D=summary&transactions%5B2%5D%5Bvalue%5D=+&transactions%5B3%5D%5Btype%5D=title&transactions%5B3%5D%5Bvalue%5D=create+beta+for+phabricator+test",
+                "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":{\"object\":{\"id\":6055,\"phid\":\"PHID-DREV-k2hin2iytzuvu3j5icm3\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-3xjvwemev7dqsj3\"},{\"phid\":\"PHID-XACT-DREV-giypqlavgemr56i\"},{\"phid\":\"PHID-XACT-DREV-tcfqd4aj6rxtxzz\"},{\"phid\":\"PHID-XACT-DREV-2timgnudaxeln7a\"},{\"phid\":\"PHID-XACT-DREV-vb6564lrsxpsw4l\"},{\"phid\":\"PHID-XACT-DREV-maym4xi2tdhysvo\"},{\"phid\":\"PHID-XACT-DREV-bna5heyckxkk5ke\"},{\"phid\":\"PHID-XACT-DREV-b2eig3stbdic7k7\"}]},\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:38 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:17 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "420"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2Fgqyrj3op7rar26t6crqlt6rpdsxcefnrofqkw5rt; expires=Fri, 01-Mar-2024 00:12:38 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":{\"object\":{\"id\":1191,\"phid\":\"PHID-DREV-uuyrww2k3weorn2jwcaz\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-erc62kc5d5t53dw\"},{\"phid\":\"PHID-XACT-DREV-56jxoj2nev5we3e\"},{\"phid\":\"PHID-XACT-DREV-cajnfsuigdcmfpn\"},{\"phid\":\"PHID-XACT-DREV-expntfzlv44va6h\"},{\"phid\":\"PHID-XACT-DREV-hzrgd55fpfjcan7\"},{\"phid\":\"PHID-XACT-DREV-v4baqr7c5ydtltr\"},{\"phid\":\"PHID-XACT-DREV-ge6dwwrvrkluq2q\"}]},\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "api.token=cli-hahayouwish&ids%5B0%5D=6054&ids%5B1%5D=6055",
-                "uri": "https://phab.mercurial-scm.org//api/differential.query",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "74"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "74"
                     ]
-                }
+                },
+                "body": "ids%5B0%5D=1190&ids%5B1%5D=1191&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.query",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":[{\"id\":\"6055\",\"phid\":\"PHID-DREV-k2hin2iytzuvu3j5icm3\",\"title\":\"create beta for phabricator test\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D6055\",\"dateCreated\":\"1551571958\",\"dateModified\":\"1551571958\",\"authorPHID\":\"PHID-USER-5iy6mkoveguhm2zthvww\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":[],\"branch\":null,\"summary\":\" \",\"testPlan\":\"\",\"lineCount\":\"1\",\"activeDiffPHID\":\"PHID-DIFF-pofynzhmmqm2czm33teg\",\"diffs\":[\"14305\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-6pczsbtdpqjc2nskmxwy\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null},{\"id\":\"6054\",\"phid\":\"PHID-DREV-6pczsbtdpqjc2nskmxwy\",\"title\":\"create alpha for phabricator test \\u20ac\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D6054\",\"dateCreated\":\"1551571947\",\"dateModified\":\"1551571958\",\"authorPHID\":\"PHID-USER-5iy6mkoveguhm2zthvww\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":[],\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-3wv2fwmzp27uamb66xxg\",\"diffs\":[\"14304\",\"14303\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:39 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:17 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "1514"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2F5wxg6sdf2mby5iljd5e5qpgoex6uefo5pgltav7k; expires=Fri, 01-Mar-2024 00:12:39 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":[{\"id\":\"1191\",\"phid\":\"PHID-DREV-uuyrww2k3weorn2jwcaz\",\"title\":\"create beta for phabricator test\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D1191\",\"dateCreated\":\"1557063076\",\"dateModified\":\"1557063077\",\"authorPHID\":\"PHID-USER-qmzis76vb2yh3ogldu6r\",\"status\":\"0\",\"statusName\":\"Draft\",\"properties\":{\"draft.broadcast\":false,\"lines.added\":1,\"lines.removed\":0},\"branch\":null,\"summary\":\" \",\"testPlan\":\"\",\"lineCount\":\"1\",\"activeDiffPHID\":\"PHID-DIFF-uhbyhoejzbniwwzj2q5c\",\"diffs\":[\"1901\"],\"commits\":[],\"reviewers\":[],\"ccs\":[],\"hashes\":[],\"auxiliary\":{\"bugzilla.bug-id\":null,\"phabricator:projects\":[\"PHID-PROJ-f2a3wl5wxtqdtfgdjqzk\"],\"phabricator:depends-on\":[\"PHID-DREV-kikesmfxhzpfaxbzgj3l\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null},{\"id\":\"1190\",\"phid\":\"PHID-DREV-kikesmfxhzpfaxbzgj3l\",\"title\":\"create alpha for phabricator test \\u20ac\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D1190\",\"dateCreated\":\"1557063064\",\"dateModified\":\"1557063076\",\"authorPHID\":\"PHID-USER-qmzis76vb2yh3ogldu6r\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":2,\"lines.removed\":0},\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-gra4b3ivsgebktbeoxxx\",\"diffs\":[\"1900\",\"1899\"],\"commits\":[],\"reviewers\":[],\"ccs\":[],\"hashes\":[],\"auxiliary\":{\"bugzilla.bug-id\":null,\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "diff_id=14305&data=%7B%22user%22%3A+%22test%22%2C+%22parent%22%3A+%22939d862f03181a366fea64a540baf0bb33f85d92%22%2C+%22node%22%3A+%229c64e1fc33e1b9a70eb60643fe96a4d5badad9dc%22%2C+%22date%22%3A+%220+0%22%7D&api.token=cli-hahayouwish&name=hg%3Ameta",
-                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "296"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "264"
                     ]
-                }
+                },
+                "body": "diff_id=1901&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%222837deb84f4ab1315c1197b8aef10c620465e352%22%2C+%22parent%22%3A+%22d940d39fb603f29ea5df4b7c15f315fe6ff4e346%22%2C+%22user%22%3A+%22test%22%7D&name=hg%3Ameta&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:40 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:18 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "51"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2F4c7iamnsn57y6qpccmbesf4ooflmkqvt4m6udawl; expires=Fri, 01-Mar-2024 00:12:40 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
                 }
             }
         },
         {
             "request": {
-                "method": "POST",
-                "body": "diff_id=14305&data=%7B%229c64e1fc33e1b9a70eb60643fe96a4d5badad9dc%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&api.token=cli-hahayouwish&name=local%3Acommits",
-                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
                 "headers": {
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+                    ],
+                    "content-length": [
+                        "257"
+                    ],
                     "content-type": [
                         "application/x-www-form-urlencoded"
                     ],
                     "accept": [
                         "application/mercurial-0.1"
                     ],
-                    "user-agent": [
-                        "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
-                    ],
                     "host": [
                         "phab.mercurial-scm.org"
-                    ],
-                    "content-length": [
-                        "227"
                     ]
-                }
+                },
+                "body": "diff_id=1901&data=%7B%222837deb84f4ab1315c1197b8aef10c620465e352%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%222837deb84f4ab1315c1197b8aef10c620465e352%22%2C+%22parents%22%3A+%5B%22d940d39fb603f29ea5df4b7c15f315fe6ff4e346%22%5D%2C+%22time%22%3A+0%7D%7D&name=local%3Acommits&api.token=cli-hahayouwish",
+                "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+                "method": "POST"
             },
             "response": {
                 "status": {
                     "code": 200,
                     "message": "OK"
                 },
-                "body": {
-                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
-                },
                 "headers": {
-                    "expires": [
-                        "Sat, 01 Jan 2000 00:00:00 GMT"
-                    ],
                     "x-xss-protection": [
                         "1; mode=block"
                     ],
-                    "transfer-encoding": [
-                        "chunked"
-                    ],
-                    "date": [
-                        "Sun, 03 Mar 2019 00:12:40 GMT"
-                    ],
-                    "x-frame-options": [
-                        "Deny"
-                    ],
                     "cache-control": [
                         "no-store"
                     ],
                     "content-type": [
                         "application/json"
                     ],
+                    "date": [
+                        "Sun, 05 May 2019 13:31:19 GMT"
+                    ],
+                    "connection": [
+                        "keep-alive"
+                    ],
+                    "strict-transport-security": [
+                        "max-age=31536000; includeSubdomains; preload"
+                    ],
+                    "vary": [
+                        "Accept-Encoding"
+                    ],
+                    "x-frame-options": [
+                        "Deny"
+                    ],
+                    "content-length": [
+                        "51"
+                    ],
                     "x-content-type-options": [
                         "nosniff"
                     ],
-                    "server": [
-                        "Apache/2.4.10 (Debian)"
-                    ],
-                    "set-cookie": [
-                        "phsid=A%2Ftdudqohojcq4hyc7gl4kthzkhuq3nmcxgnunpbjm; expires=Fri, 01-Mar-2024 00:12:40 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
-                    ],
-                    "strict-transport-security": [
-                        "max-age=0; includeSubdomains; preload"
+                    "expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
                     ]
+                },
+                "body": {
+                    "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
                 }
             }
         }
-    ],
-    "version": 1
+    ]
 }
--- a/tests/test-absorb.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-absorb.t	Mon May 20 11:40:47 2019 -0400
@@ -176,7 +176,7 @@
      b2                        2:c9b20c925790
    * ba                        2:c9b20c925790
 
-Non-mofified files are ignored:
+Non-modified files are ignored:
 
   $ touch b
   $ hg commit -A b -m b
@@ -225,10 +225,15 @@
   2: 4d
   2: insert aftert 4d
 
+  $ hg co -qC 1
+  $ sedi 's/Insert/insert/' a
+  $ hg absorb --apply-changes
+  abort: no mutable changeset to change
+  [255]
+
 Make working copy clean:
 
-  $ hg revert -q -C a b
-  $ hg forget c
+  $ hg co -qC ba
   $ rm c
   $ hg status
 
@@ -261,7 +266,7 @@
   $ echo 2 >> m1
   $ echo 2 >> m2
   $ hg absorb --apply-changes
-  abort: no mutable changeset to change
+  abort: cannot absorb into a merge
   [255]
   $ hg revert -q -C m1 m2
 
--- a/tests/test-annotate.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-annotate.t	Mon May 20 11:40:47 2019 -0400
@@ -1,4 +1,7 @@
-  $ HGMERGE=true; export HGMERGE
+  $ cat >> "$HGRCPATH" << EOF
+  > [ui]
+  > merge = :merge3
+  > EOF
 
 init
 
@@ -210,8 +213,34 @@
   created new head
   $ hg merge
   merging b
-  0 files updated, 1 files merged, 0 files removed, 0 files unresolved
-  (branch merge, don't forget to commit)
+  warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
+  0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+  [1]
+  $ cat b
+  a
+  a
+  a
+  <<<<<<< working copy: 5fbdc1152d97 - test: b2.1
+  b4
+  c
+  b5
+  ||||||| base
+  =======
+  b4
+  b5
+  b6
+  >>>>>>> merge rev:    37ec9f5c3d1f - test: b2
+  $ cat <<EOF > b
+  > a
+  > a
+  > a
+  > b4
+  > c
+  > b5
+  > EOF
+  $ hg resolve --mark -q
+  $ rm b.orig
   $ hg ci -mmergeb -d '3 0'
 
 annotate after merge
@@ -244,15 +273,31 @@
   > EOF
   $ hg ci -mc -d '3 0'
   created new head
+Work around the pure version not resolving the conflict like native code
+#if pure
+  $ hg merge
+  merging b
+  warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
+  0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+  [1]
+  $ cat <<EOF > b
+  > a
+  > z
+  > a
+  > b4
+  > c
+  > b5
+  > EOF
+  $ hg resolve -m b
+  (no more unresolved files)
+  $ rm b.orig
+#else
   $ hg merge
   merging b
   0 files updated, 1 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
-  $ cat <<EOF >> b
-  > b4
-  > c
-  > b5
-  > EOF
+#endif
   $ echo d >> b
   $ hg ci -mmerge2 -d '4 0'
 
@@ -695,8 +740,41 @@
   27: baz:3+->3-
   $ hg merge 25
   merging baz and qux to qux
-  0 files updated, 1 files merged, 0 files removed, 0 files unresolved
-  (branch merge, don't forget to commit)
+  warning: conflicts while merging qux! (edit, then use 'hg resolve --mark')
+  0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+  [1]
+  $ cat qux
+  0
+  0
+  1 baz:1
+  2 baz:2
+  <<<<<<< working copy: 863de62655ef - test: baz:3+->3-
+  3- baz:3
+  4 baz:4
+  ||||||| base
+  3+ baz:3
+  4 baz:4
+  =======
+  3+ baz:3
+  4+ baz:4
+  >>>>>>> merge rev:    cb8df70ae185 - test: qux:4->4+
+  5
+  6
+  7
+  $ cat > qux <<EOF
+  > 0
+  > 0
+  > 1 baz:1
+  > 2 baz:2
+  > 3- baz:3
+  > 4 baz:4
+  > 5
+  > 6
+  > 7
+  > EOF
+  $ hg resolve --mark -q
+  $ rm qux.orig
   $ hg ci -m merge
   $ hg log -T '{rev}: {desc}\n' -r 'followlines(qux, 5:7)'
   16: baz:0
@@ -709,8 +787,40 @@
   $ hg up 25 --quiet
   $ hg merge 27
   merging qux and baz to qux
-  0 files updated, 1 files merged, 0 files removed, 0 files unresolved
-  (branch merge, don't forget to commit)
+  warning: conflicts while merging qux! (edit, then use 'hg resolve --mark')
+  0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+  [1]
+  $ cat qux
+  0
+  0
+  1 baz:1
+  2 baz:2
+  <<<<<<< working copy: cb8df70ae185 - test: qux:4->4+
+  3+ baz:3
+  4+ baz:4
+  ||||||| base
+  3+ baz:3
+  4 baz:4
+  =======
+  3- baz:3
+  4 baz:4
+  >>>>>>> merge rev:    863de62655ef - test: baz:3+->3-
+  5
+  6
+  7
+  $ cat > qux <<EOF
+  > 0
+  > 0
+  > 1 baz:1
+  > 2 baz:2
+  > 3+ baz:3
+  > 4+ baz:4
+  > 5
+  > 6
+  > EOF
+  $ hg resolve --mark -q
+  $ rm qux.orig
   $ hg ci -m 'merge from other side'
   created new head
   $ hg log -T '{rev}: {desc}\n' -r 'followlines(qux, 5:7)'
@@ -1061,6 +1171,19 @@
   $ echo 3 >> a
   $ hg commit -m 3 -q
   $ hg merge 2 -q
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+  [1]
+  $ cat a
+  <<<<<<< working copy: 0a068f0261cf - test: 3
+  1
+  2
+  3
+  ||||||| base
+  1
+  2
+  =======
+  a
+  >>>>>>> merge rev:    9409851bc20a - test: a
   $ cat > a << EOF
   > b
   > 1
@@ -1069,6 +1192,7 @@
   > a
   > EOF
   $ hg resolve --mark -q
+  $ rm a.orig
   $ hg commit -m m
   $ hg annotate a
   4: b
--- a/tests/test-bisect.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-bisect.t	Mon May 20 11:40:47 2019 -0400
@@ -600,6 +600,129 @@
   summary:     msg 30
   
 
+Rewritten commits should not crash
+
+  $ hg co 29
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg revert --all -r 30
+  reverting a
+  $ hg commit -m 'msg 30 -- fixed'
+  created new head
+  $ hg debugobsolete `hg id --debug -i -r 30` `hg id --debug -i -r .`
+  obsoleted 1 changesets
+  $ hg bisect
+  The first bad revision is:
+  changeset:   30:ed2d2f24b11c
+  user:        test
+  date:        Thu Jan 01 00:00:30 1970 +0000
+  obsolete:    rewritten as 32:8a638ebd1122
+  summary:     msg 30
+  
+
+Log template does not crash
+
+  $ hg log -GTbisect -r 15::
+  @  changeset:   32:8a638ebd1122
+  |  bisect:      good (implicit)
+  |  tag:         tip
+  |  parent:      29:b5bd63375ab9
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     msg 30 -- fixed
+  |
+  o  changeset:   29:b5bd63375ab9
+  |  bisect:      good
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:29 1970 +0000
+  |  summary:     msg 29
+  |
+  o  changeset:   28:8e0c2264c8af
+  |  bisect:      good
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:28 1970 +0000
+  |  summary:     msg 28
+  |
+  o  changeset:   27:288867a866e9
+  |  bisect:      ignored
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:27 1970 +0000
+  |  summary:     msg 27
+  |
+  o  changeset:   26:3efc6fd51aeb
+  |  bisect:      good
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:26 1970 +0000
+  |  summary:     msg 26
+  |
+  o  changeset:   25:02a84173a97a
+  |  bisect:      ignored
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:25 1970 +0000
+  |  summary:     msg 25
+  |
+  o  changeset:   24:10e0acd3809e
+  |  bisect:      ignored
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:24 1970 +0000
+  |  summary:     msg 24
+  |
+  o  changeset:   23:5ec79163bff4
+  |  bisect:      ignored
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:23 1970 +0000
+  |  summary:     msg 23
+  |
+  o  changeset:   22:06c7993750ce
+  |  bisect:      good
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:22 1970 +0000
+  |  summary:     msg 22
+  |
+  o  changeset:   21:e5db6aa3fe2a
+  |  bisect:      ignored
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:21 1970 +0000
+  |  summary:     msg 21
+  |
+  o  changeset:   20:7128fb4fdbc9
+  |  bisect:      ignored
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:20 1970 +0000
+  |  summary:     msg 20
+  |
+  o  changeset:   19:52798545b482
+  |  bisect:      ignored
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:19 1970 +0000
+  |  summary:     msg 19
+  |
+  o  changeset:   18:86977a90077e
+  |  bisect:      ignored
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:18 1970 +0000
+  |  summary:     msg 18
+  |
+  o  changeset:   17:03515f4a9080
+  |  bisect:      ignored
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:17 1970 +0000
+  |  summary:     msg 17
+  |
+  o  changeset:   16:a2e6ea4973e9
+  |  bisect:      ignored
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:16 1970 +0000
+  |  summary:     msg 16
+  |
+  o  changeset:   15:e7fa0811edb0
+  |  bisect:      good
+  ~  user:        test
+     date:        Thu Jan 01 00:00:15 1970 +0000
+     summary:     msg 15
+  
+  $ hg debugobsolete --delete `hg debugobsolete --index -T'{index}\n' | tail -1`
+  deleted 1 obsolescence markers
+
 Changeset in the bad:good range is obsolete
 ---------------------------------------------
 
--- a/tests/test-branches.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-branches.t	Mon May 20 11:40:47 2019 -0400
@@ -940,3 +940,35 @@
   0010: 56 46 78 69 00 00 00 01                         |VFxi....|
 
   $ cd ..
+
+Test to make sure that `--close-branch` only works on a branch head:
+--------------------------------------------------------------------
+  $ hg init closebranch
+  $ cd closebranch
+  $ for ch in a b c; do
+  > echo $ch > $ch
+  > hg add $ch
+  > hg ci -m "added "$ch
+  > done;
+
+  $ hg up -r "desc('added b')"
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+
+trying to close branch from a cset which is not a branch head
+it should abort:
+  $ hg ci -m "closing branch" --close-branch
+  abort: can only close branch heads
+  [255]
+
+  $ hg up 0
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ hg log -GT "{rev}: {node|short} {desc|firstline}\n\t{branch}\n\n"
+  o  2: 155349b645be added c
+  |  	default
+  |
+  o  1: 5f6d8a4bf34a added b
+  |  	default
+  |
+  @  0: 9092f1db7931 added a
+     	default
+  
--- a/tests/test-check-code.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-check-code.t	Mon May 20 11:40:47 2019 -0400
@@ -15,6 +15,8 @@
   Skipping contrib/automation/hgautomation/__init__.py it has no-che?k-code (glob)
   Skipping contrib/automation/hgautomation/aws.py it has no-che?k-code (glob)
   Skipping contrib/automation/hgautomation/cli.py it has no-che?k-code (glob)
+  Skipping contrib/automation/hgautomation/linux.py it has no-che?k-code (glob)
+  Skipping contrib/automation/hgautomation/ssh.py it has no-che?k-code (glob)
   Skipping contrib/automation/hgautomation/windows.py it has no-che?k-code (glob)
   Skipping contrib/automation/hgautomation/winrm.py it has no-che?k-code (glob)
   Skipping contrib/packaging/hgpackaging/downloads.py it has no-che?k-code (glob)
--- a/tests/test-clonebundles.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-clonebundles.t	Mon May 20 11:40:47 2019 -0400
@@ -53,7 +53,7 @@
   $ echo 'http://does.not.exist/bundle.hg' > server/.hg/clonebundles.manifest
   $ hg clone http://localhost:$HGPORT 404-url
   applying clone bundle from http://does.not.exist/bundle.hg
-  error fetching bundle: (.* not known|(\[Errno -?\d+])? No address associated with hostname) (re) (no-windows !)
+  error fetching bundle: (.* not known|(\[Errno -?\d+])? [Nn]o address associated with (host)?name) (re) (no-windows !)
   error fetching bundle: [Errno 1100*] getaddrinfo failed (glob) (windows !)
   abort: error applying bundle
   (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
--- a/tests/test-close-head.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-close-head.t	Mon May 20 11:40:47 2019 -0400
@@ -33,7 +33,11 @@
   $ hg --config extensions.closehead= close-head -m 'Not a head' -r 0 1
   abort: revision is not an open head: 0
   [255]
+  $ hg id
+  000000000000
   $ hg --config extensions.closehead= close-head -m 'Close old heads' -r 1 2
+  $ hg id
+  000000000000
   $ hg bookmark
      @                         1:66f7d451a68b
   $ hg heads
--- a/tests/test-commit-amend.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-commit-amend.t	Mon May 20 11:40:47 2019 -0400
@@ -783,6 +783,20 @@
   
   $ hg debugrename aaa
   aaa renamed from aa:37d9b5d994eab34eda9c16b195ace52c7b129980
+
+Update to p1 with 'aaa' modified. 'aaa' was renamed from 'aa' in p2. 'aa' exists
+in p1 too, but it was recorded as copied from p2.
+  $ echo modified >> aaa
+  $ hg co -m '.^' -t :merge3
+  file 'aaa' was deleted in other [destination] but was modified in local [working copy].
+  What do you want to do?
+  use (c)hanged version, (d)elete, or leave (u)nresolved? u
+  1 files updated, 0 files merged, 1 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges
+  [1]
+  $ hg co -C tip
+  2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+
   $ hg mv aaa aa
   $ hg ci --amend -m 'merge bar again (undo rename)'
   $ hg log --config diff.git=1 -pr .
--- a/tests/test-commit.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-commit.t	Mon May 20 11:40:47 2019 -0400
@@ -708,6 +708,8 @@
   abort: path contains illegal component: HG8B6C~2/hgrc
   [255]
 
+  $ cd ..
+
 # test that an unmodified commit template message aborts
 
   $ hg init unmodified_commit_template
@@ -734,6 +736,8 @@
   abort: commit message unchanged
   [255]
 
+  $ cd ..
+
 test that text below the --- >8 --- special string is ignored
 
   $ cat <<'EOF' > $TESTTMP/lowercaseline.sh
@@ -834,3 +838,42 @@
   second line
 
   $ cd ..
+
+testing commands.commit.post-status config option
+
+  $ hg init ci-post-st
+  $ cd ci-post-st
+  $ echo '[commands]' > .hg/hgrc
+  $ echo 'commit.post-status = 1' >> .hg/hgrc
+
+  $ echo 'ignored-file' > .hgignore
+  $ hg ci -qAm 0
+
+  $ echo 'c' > clean-file
+  $ echo 'a' > added-file
+  $ echo '?' > unknown-file
+  $ echo 'i' > ignored-file
+  $ hg add clean-file added-file
+  $ hg ci -m 1 clean-file
+  A added-file
+  ? unknown-file
+  $ hg st -mardu
+  A added-file
+  ? unknown-file
+
+  $ touch modified-file
+  $ hg add modified-file
+  $ hg ci -m 2 modified-file -q
+
+  $ echo 'm' > modified-file
+  $ hg ci --amend -m 'reworded' -X 're:'
+  saved backup bundle to $TESTTMP/ci-post-st/.hg/strip-backup/*-amend.hg (glob)
+  M modified-file
+  A added-file
+  ? unknown-file
+  $ hg st -mardu
+  M modified-file
+  A added-file
+  ? unknown-file
+
+  $ cd ..
--- a/tests/test-completion.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-completion.t	Mon May 20 11:40:47 2019 -0400
@@ -347,7 +347,7 @@
   tip: patch, git, style, template
   unbundle: update
   update: clean, check, merge, date, rev, tool
-  verify: 
+  verify: full
   version: template
 
   $ hg init a
--- a/tests/test-convert-hg-sink.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-convert-hg-sink.t	Mon May 20 11:40:47 2019 -0400
@@ -573,3 +573,52 @@
   M f
   A b-only
   R a-only
+
+Recorded {files} list does not get confused about flags on merge commits
+
+#if execbit
+  $ cd ..
+  $ hg init merge-flags-orig
+  $ cd merge-flags-orig
+  $ echo 0 > 0
+  $ hg ci -Aqm 'add 0'
+  $ echo a > a
+  $ chmod +x a
+  $ hg ci -qAm 'add executable file'
+  $ hg co -q 0
+  $ echo b > b
+  $ hg ci -qAm 'add file'
+  $ hg merge -q
+  $ hg ci -m 'merge'
+  $ hg log -G -T '{rev} {desc}\n'
+  @    3 merge
+  |\
+  | o  2 add file
+  | |
+  o |  1 add executable file
+  |/
+  o  0 add 0
+  
+
+# No files changed
+  $ hg log -r 3 -T '{files}\n'
+  
+
+  $ cd ..
+  $ hg convert merge-flags-orig merge-flags-new -q
+  $ cd merge-flags-new
+  $ hg log -G -T '{rev} {desc}\n'
+  o    3 merge
+  |\
+  | o  2 add file
+  | |
+  o |  1 add executable file
+  |/
+  o  0 add 0
+  
+
+# Still no files
+  $ hg log -r 3 -T '{files}\n'
+  
+
+#endif
--- a/tests/test-copies-in-changeset.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-copies-in-changeset.t	Mon May 20 11:40:47 2019 -0400
@@ -7,6 +7,8 @@
   > changesetcopies = log -r . -T 'files: {files}
   >   {extras % "{ifcontains("copies", key, "{key}: {value}\n")}"}'
   > showcopies = log -r . -T '{file_copies % "{source} -> {name}\n"}'
+  > [extensions]
+  > rebase =
   > EOF
 
 Check that copies are recorded correctly
@@ -101,6 +103,7 @@
   $ hg changesetcopies
   files: j
   p1copies: j\x00a (esc)
+  p2copies: 
   $ hg debugdata j 0
   \x01 (esc)
   copy: a
@@ -113,6 +116,14 @@
   a -> j
   $ hg showcopies --config experimental.copies.read-from=filelog-only
   a -> j
+The entries should be written to extras even if they're empty (so the client
+won't have to fall back to reading from filelogs)
+  $ echo x >> j
+  $ hg ci -m 'modify j' --config experimental.copies.write-to=compatibility
+  $ hg changesetcopies
+  files: j
+  p1copies: 
+  p2copies: 
 
 Test writing only to filelog
 
@@ -133,3 +144,24 @@
   a -> k
 
   $ cd ..
+
+Test rebasing a commit with copy information
+
+  $ hg init rebase-rename
+  $ cd rebase-rename
+  $ echo a > a
+  $ hg ci -Aqm 'add a'
+  $ echo a2 > a
+  $ hg ci -m 'modify a'
+  $ hg co -q 0
+  $ hg mv a b
+  $ hg ci -qm 'rename a to b'
+  $ hg rebase -d 1 --config rebase.experimental.inmemory=yes
+  rebasing 2:55d0b405c1b2 "rename a to b" (tip)
+  merging a and b to b
+  saved backup bundle to $TESTTMP/rebase-rename/.hg/strip-backup/55d0b405c1b2-78df867e-rebase.hg
+  $ hg st --change . --copies
+  A b
+    a
+  R a
+  $ cd ..
--- a/tests/test-copies.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-copies.t	Mon May 20 11:40:47 2019 -0400
@@ -504,9 +504,9 @@
   x -> z
 
 
-Test for a case in fullcopytracing algorithm where both the merging csets are
-"dirty"; where a dirty cset means that cset is descendant of merge base. This
-test reflect that for this particular case this algorithm correctly find the copies:
+Test for a case in fullcopytracing algorithm where neither of the merging csets
+is a descendant of the merge base. This test reflects that the algorithm
+correctly finds the copies:
 
   $ cat >> $HGRCPATH << EOF
   > [experimental]
@@ -549,9 +549,6 @@
 
 Grafting revision 4 on top of revision 2, showing that it respect the rename:
 
-TODO: Make this work with copy info in changesets (probably by writing a
-changeset-centric version of copies.mergecopies())
-#if no-changeset
   $ hg up 2 -q
   $ hg graft -r 4 --base 3 --hidden
   grafting 4:af28412ec03c "added d, modified b" (tip)
@@ -560,17 +557,16 @@
   $ hg l -l1 -p
   @  5 added d, modified b
   |  b1
-  ~  diff -r 5a4825cc2926 -r 94a2f1a0e8e2 b1
+  ~  diff -r 5a4825cc2926 -r 94a2f1a0e8e2 b1 (no-changeset !)
+  ~  diff -r f5474f5023a8 -r ef7c02d69f3d b1 (changeset !)
      --- a/b1	Thu Jan 01 00:00:00 1970 +0000
      +++ b/b1	Thu Jan 01 00:00:00 1970 +0000
      @@ -1,1 +1,2 @@
       b
      +baba
   
-#endif
-
-Test to make sure that fullcopytracing algorithm don't fail when both the merging csets are dirty
-(a dirty cset is one who is not the descendant of merge base)
+Test to make sure that fullcopytracing algorithm doesn't fail when neither of the
+merging csets is a descendant of the base.
 -------------------------------------------------------------------------------------------------
 
   $ newrepo
@@ -641,8 +637,8 @@
   o  0 added a
      a
 
-Now if we trigger a merge between cset revision 3 and 6 using base revision 4, in this case
-both the merging csets will be dirty as no one is descendent of base revision:
+Now if we trigger a merge between revision 3 and 6 using base revision 4,
+neither of the merging csets will be a descendant of the base revision:
 
   $ hg graft -r 6 --base 4 --hidden -t :other
   grafting 6:99802e4f1e46 "added willconflict and d" (tip)
--- a/tests/test-copy-move-merge.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-copy-move-merge.t	Mon May 20 11:40:47 2019 -0400
@@ -23,7 +23,6 @@
   $ hg ci -qAm "other"
 
   $ hg merge --debug
-    searching for copies back to rev 1
     unmatched files in other:
      b
      c
--- a/tests/test-copytrace-heuristics.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-copytrace-heuristics.t	Mon May 20 11:40:47 2019 -0400
@@ -16,6 +16,9 @@
   > [extensions]
   > rebase=
   > shelve=
+  > [alias]
+  > l = log -G -T 'rev: {rev}\ndesc: {desc}\n'
+  > pl = log -G -T 'rev: {rev}, phase: {phase}\ndesc: {desc}\n'
   > EOF
 
 NOTE: calling initclient() set copytrace.sourcecommitlimit=-1 as we want to
@@ -43,13 +46,13 @@
   $ echo b > dir/file.txt
   $ hg ci -qm 'mod a, mod dir/file.txt'
 
-  $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
-  @  changeset: 557f403c0afd2a3cf15d7e2fb1f1001a8b85e081
-  |   desc: mod a, mod dir/file.txt
-  | o  changeset: 928d74bc9110681920854d845c06959f6dfc9547
-  |/    desc: mv a b, mv dir/ dir2/
-  o  changeset: 3c482b16e54596fed340d05ffaf155f156cda7ee
-      desc: initial
+  $ hg l
+  @  rev: 2
+  |  desc: mod a, mod dir/file.txt
+  | o  rev: 1
+  |/   desc: mv a b, mv dir/ dir2/
+  o  rev: 0
+     desc: initial
 
   $ hg rebase -s . -d 1
   rebasing 2:557f403c0afd "mod a, mod dir/file.txt" (tip)
@@ -76,13 +79,13 @@
   $ printf 'somecontent\nmoarcontent' > a
   $ hg ci -qm 'mode a'
 
-  $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
-  @  changeset: d526312210b9e8f795d576a77dc643796384d86e
-  |   desc: mode a
-  | o  changeset: 46985f76c7e5e5123433527f5c8526806145650b
-  |/    desc: rm a, add b
-  o  changeset: e5b71fb099c29d9172ef4a23485aaffd497e4cc0
-      desc: initial
+  $ hg l
+  @  rev: 2
+  |  desc: mode a
+  | o  rev: 1
+  |/   desc: rm a, add b
+  o  rev: 0
+     desc: initial
 
   $ hg rebase -s . -d 1
   rebasing 2:d526312210b9 "mode a" (tip)
@@ -113,15 +116,15 @@
   $ echo b > a
   $ hg ci -qm 'mod a'
 
-  $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
-  @  changeset: 9d5cf99c3d9f8e8b05ba55421f7f56530cfcf3bc
-  |   desc: mod a, phase: draft
-  | o  changeset: d760186dd240fc47b91eb9f0b58b0002aaeef95d
-  |/    desc: mv a b, phase: draft
-  o  changeset: 48e1b6ba639d5d7fb313fa7989eebabf99c9eb83
-  |   desc: randomcommit, phase: draft
-  o  changeset: e5b71fb099c29d9172ef4a23485aaffd497e4cc0
-      desc: initial, phase: draft
+  $ hg pl
+  @  rev: 3, phase: draft
+  |  desc: mod a
+  | o  rev: 2, phase: draft
+  |/   desc: mv a b
+  o  rev: 1, phase: draft
+  |  desc: randomcommit
+  o  rev: 0, phase: draft
+     desc: initial
 
   $ hg rebase -s . -d 2
   rebasing 3:9d5cf99c3d9f "mod a" (tip)
@@ -148,15 +151,15 @@
   $ echo b > b
   $ hg ci -qm 'mod b'
 
-  $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
-  @  changeset: fbe97126b3969056795c462a67d93faf13e4d298
-  |   desc: mod b
-  o  changeset: d760186dd240fc47b91eb9f0b58b0002aaeef95d
-  |   desc: mv a b
-  o  changeset: 48e1b6ba639d5d7fb313fa7989eebabf99c9eb83
-  |   desc: randomcommit
-  o  changeset: e5b71fb099c29d9172ef4a23485aaffd497e4cc0
-      desc: initial
+  $ hg l
+  @  rev: 3
+  |  desc: mod b
+  o  rev: 2
+  |  desc: mv a b
+  o  rev: 1
+  |  desc: randomcommit
+  o  rev: 0
+     desc: initial
 
   $ hg rebase -s . -d 0
   rebasing 3:fbe97126b396 "mod b" (tip)
@@ -185,15 +188,15 @@
   $ echo b > dir/a
   $ hg ci -qm 'mod dir/a'
 
-  $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
-  @  changeset: 6b2f4cece40fd320f41229f23821256ffc08efea
-  |   desc: mod dir/a
-  | o  changeset: 4494bf7efd2e0dfdd388e767fb913a8a3731e3fa
-  | |   desc: create dir2/a
-  | o  changeset: b1784dfab6ea6bfafeb11c0ac50a2981b0fe6ade
-  |/    desc: mv dir/a dir/b
-  o  changeset: 36859b8907c513a3a87ae34ba5b1e7eea8c20944
-      desc: initial
+  $ hg l
+  @  rev: 3
+  |  desc: mod dir/a
+  | o  rev: 2
+  | |  desc: create dir2/a
+  | o  rev: 1
+  |/   desc: mv dir/a dir/b
+  o  rev: 0
+     desc: initial
 
   $ hg rebase -s . -d 2
   rebasing 3:6b2f4cece40f "mod dir/a" (tip)
@@ -230,13 +233,13 @@
   $ hg ci -m 'mod a'
   created new head
 
-  $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
-  @  changeset: ef716627c70bf4ca0bdb623cfb0d6fe5b9acc51e
-  |   desc: mod a
-  | o  changeset: 8329d5c6bf479ec5ca59b9864f3f45d07213f5a4
-  |/    desc: mv a foo, add many files
-  o  changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
-      desc: initial
+  $ hg l
+  @  rev: 2
+  |  desc: mod a
+  | o  rev: 1
+  |/   desc: mv a foo, add many files
+  o  rev: 0
+     desc: initial
 
 With small limit
 
@@ -278,13 +281,13 @@
   $ hg ci -m 'del a'
   created new head
 
-  $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
-  @  changeset: 7d61ee3b1e48577891a072024968428ba465c47b
-  |   desc: del a, phase: draft
-  | o  changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
-  |/    desc: mv a b, phase: draft
-  o  changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
-      desc: initial, phase: draft
+  $ hg pl
+  @  rev: 2, phase: draft
+  |  desc: del a
+  | o  rev: 1, phase: draft
+  |/   desc: mv a b
+  o  rev: 0, phase: draft
+     desc: initial
 
   $ hg rebase -s 1 -d 2
   rebasing 1:472e38d57782 "mv a b"
@@ -311,13 +314,13 @@
   $ hg mv -q dir/ dir2
   $ hg ci -qm 'mv dir/ dir2/'
 
-  $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
-  @  changeset: a33d80b6e352591dfd82784e1ad6cdd86b25a239
-  |   desc: mv dir/ dir2/
-  | o  changeset: 6b2f4cece40fd320f41229f23821256ffc08efea
-  |/    desc: mod dir/a
-  o  changeset: 36859b8907c513a3a87ae34ba5b1e7eea8c20944
-      desc: initial
+  $ hg l
+  @  rev: 2
+  |  desc: mv dir/ dir2/
+  | o  rev: 1
+  |/   desc: mod dir/a
+  o  rev: 0
+     desc: initial
 
   $ hg rebase -s . -d 1
   rebasing 2:a33d80b6e352 "mv dir/ dir2/" (tip)
@@ -345,15 +348,15 @@
   $ hg ci -m 'mod a'
   created new head
 
-  $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
-  @  changeset: d413169422167a3fa5275fc5d71f7dea9f5775f3
-  |   desc: mod a
-  | o  changeset: d3efd280421d24f9f229997c19e654761c942a71
-  | |   desc: mv b c
-  | o  changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
-  |/    desc: mv a b
-  o  changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
-      desc: initial
+  $ hg l
+  @  rev: 3
+  |  desc: mod a
+  | o  rev: 2
+  | |  desc: mv b c
+  | o  rev: 1
+  |/   desc: mv a b
+  o  rev: 0
+     desc: initial
   $ hg rebase -s . -d 2
   rebasing 3:d41316942216 "mod a" (tip)
   merging c and a to c
@@ -379,15 +382,15 @@
   $ echo c > a
   $ hg ci -m 'mod a'
   created new head
-  $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
-  @  changeset: d413169422167a3fa5275fc5d71f7dea9f5775f3
-  |   desc: mod a
-  | o  changeset: d3efd280421d24f9f229997c19e654761c942a71
-  | |   desc: mv b c
-  | o  changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
-  |/    desc: mv a b
-  o  changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
-      desc: initial
+  $ hg l
+  @  rev: 3
+  |  desc: mod a
+  | o  rev: 2
+  | |  desc: mv b c
+  | o  rev: 1
+  |/   desc: mv a b
+  o  rev: 0
+     desc: initial
   $ hg rebase -s 1 -d .
   rebasing 1:472e38d57782 "mv a b"
   merging a and b to b
@@ -417,15 +420,15 @@
   $ hg ci -m 'mod a'
   created new head
 
-  $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
-  @  changeset: ef716627c70bf4ca0bdb623cfb0d6fe5b9acc51e
-  |   desc: mod a
-  | o  changeset: b1a6187e79fbce851bb584eadcb0cc4a80290fd9
-  | |   desc: add c
-  | o  changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
-  |/    desc: mv a b
-  o  changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
-      desc: initial
+  $ hg l
+  @  rev: 3
+  |  desc: mod a
+  | o  rev: 2
+  | |  desc: add c
+  | o  rev: 1
+  |/   desc: mv a b
+  o  rev: 0
+     desc: initial
 
   $ hg rebase -s . -d 2
   rebasing 3:ef716627c70b "mod a" (tip)
@@ -455,13 +458,13 @@
   created new head
   $ hg up -q 2
 
-  $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
-  @  changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
-  |   desc: mv a b
-  | o  changeset: b0357b07f79129a3d08a68621271ca1352ae8a09
-  |/    desc: modify a
-  o  changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
-      desc: initial
+  $ hg l
+  @  rev: 2
+  |  desc: mv a b
+  | o  rev: 1
+  |/   desc: modify a
+  o  rev: 0
+     desc: initial
 
   $ hg merge 1
   merging b and a to b
@@ -490,13 +493,13 @@
   $ hg ci -m 'mod a'
   created new head
 
-  $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
-  @  changeset: ef716627c70bf4ca0bdb623cfb0d6fe5b9acc51e
-  |   desc: mod a
-  | o  changeset: 4fc3fd13fbdb89ada6b75bfcef3911a689a0dde8
-  |/    desc: cp a c, mv a b
-  o  changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
-      desc: initial
+  $ hg l
+  @  rev: 2
+  |  desc: mod a
+  | o  rev: 1
+  |/   desc: cp a c, mv a b
+  o  rev: 0
+     desc: initial
 
   $ hg rebase -s . -d 1
   rebasing 2:ef716627c70b "mod a" (tip)
@@ -530,32 +533,32 @@
   $ hg mv b c
   $ hg ci -qm 'mv b c'
   $ hg up -q 1
-  $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
-  o  changeset: d3efd280421d24f9f229997c19e654761c942a71
-  |   desc: mv b c
-  o  changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
-  |   desc: mv a b
-  | @  changeset: ef716627c70bf4ca0bdb623cfb0d6fe5b9acc51e
-  |/    desc: mod a
-  o  changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
-      desc: initial
+  $ hg l
+  o  rev: 3
+  |  desc: mv b c
+  o  rev: 2
+  |  desc: mv a b
+  | @  rev: 1
+  |/   desc: mod a
+  o  rev: 0
+     desc: initial
 
   $ hg merge 3
   merging a and c to c
   0 files updated, 1 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   $ hg ci -qm 'merge'
-  $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
-  @    changeset: cd29b0d08c0f39bfed4cde1b40e30f419db0c825
-  |\    desc: merge, phase: draft
-  | o  changeset: d3efd280421d24f9f229997c19e654761c942a71
-  | |   desc: mv b c, phase: draft
-  | o  changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
-  | |   desc: mv a b, phase: draft
-  o |  changeset: ef716627c70bf4ca0bdb623cfb0d6fe5b9acc51e
-  |/    desc: mod a, phase: draft
-  o  changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
-      desc: initial, phase: draft
+  $ hg pl
+  @    rev: 4, phase: draft
+  |\   desc: merge
+  | o  rev: 3, phase: draft
+  | |  desc: mv b c
+  | o  rev: 2, phase: draft
+  | |  desc: mv a b
+  o |  rev: 1, phase: draft
+  |/   desc: mod a
+  o  rev: 0, phase: draft
+     desc: initial
   $ ls
   c
   $ cd ..
@@ -577,11 +580,11 @@
   $ hg mv a b
   $ hg ci -m 'mv a b'
 
-  $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
-  @  changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
-  |   desc: mv a b
-  o  changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
-      desc: initial
+  $ hg l
+  @  rev: 1
+  |  desc: mv a b
+  o  rev: 0
+     desc: initial
   $ hg unshelve
   unshelving change 'default'
   rebasing shelved changes
@@ -614,13 +617,13 @@
   $ cd ..
   $ hg ci -qm 'mod a'
 
-  $ hg log -G -T 'changeset {node}\n desc {desc}, phase: {phase}\n'
-  @  changeset 6207d2d318e710b882e3d5ada2a89770efc42c96
-  |   desc mod a, phase: draft
-  | o  changeset abffdd4e3dfc04bc375034b970299b2a309a1cce
-  |/    desc mv a b; mv dir1 dir2, phase: draft
-  o  changeset 81973cd24b58db2fdf18ce3d64fb2cc3284e9ab3
-      desc initial, phase: draft
+  $ hg pl
+  @  rev: 2, phase: draft
+  |  desc: mod a
+  | o  rev: 1, phase: draft
+  |/   desc: mv a b; mv dir1 dir2
+  o  rev: 0, phase: draft
+     desc: initial
 
   $ hg rebase -s . -d 1 --config experimental.copytrace.sourcecommitlimit=100
   rebasing 2:6207d2d318e7 "mod a" (tip)
@@ -652,13 +655,13 @@
   $ hg mv -q dir1 dir2
   $ hg ci -qm 'mv dir1 dir2'
 
-  $ hg log -G -T 'changeset {node}\n desc {desc}, phase: {phase}\n'
-  @  changeset e8919e7df8d036e07b906045eddcd4a42ff1915f
-  |   desc mv dir1 dir2, phase: draft
-  | o  changeset 7c7c6f339be00f849c3cb2df738ca91db78b32c8
-  |/    desc hg add dir1/a, phase: draft
-  o  changeset a235dcce55dcf42034c4e374cb200662d0bb4a13
-      desc initial, phase: draft
+  $ hg pl
+  @  rev: 2, phase: draft
+  |  desc: mv dir1 dir2
+  | o  rev: 1, phase: draft
+  |/   desc: hg add dir1/a
+  o  rev: 0, phase: draft
+     desc: initial
 
   $ hg rebase -s . -d 1 --config experimental.copytrace.sourcecommitlimit=100
   rebasing 2:e8919e7df8d0 "mv dir1 dir2" (tip)
@@ -685,19 +688,19 @@
   $ mkdir foo
   $ hg mv a foo/bar
   $ hg ci -m "Moved a to foo/bar"
-  $ hg log -G -T 'changeset {node}\n desc {desc}, phase: {phase}\n'
-  @  changeset b4b0f7880e500b5c364a5f07b4a2b167de7a6fb0
-  |   desc Moved a to foo/bar, phase: draft
-  o  changeset 5f6d8a4bf34ab274ccc9f631c2536964b8a3666d
-  |   desc added b, phase: draft
-  | o  changeset 8b6e13696c38e8445a759516474640c2f8dddef6
-  |/    desc added more things to a, phase: draft
-  o  changeset 9092f1db7931481f93b37d5c9fbcfc341bcd7318
-      desc added a, phase: draft
+  $ hg pl
+  @  rev: 3, phase: draft
+  |  desc: Moved a to foo/bar
+  o  rev: 2, phase: draft
+  |  desc: added b
+  | o  rev: 1, phase: draft
+  |/   desc: added more things to a
+  o  rev: 0, phase: draft
+     desc: added a
 
 When the sourcecommitlimit is small and we have more drafts, we use heuristics only
 
-  $ hg rebase -s 8b6e13696 -d .
+  $ hg rebase -s 1 -d .
   rebasing 1:8b6e13696c38 "added more things to a"
   file 'a' was deleted in local [dest] but was modified in other [source].
   What do you want to do?
@@ -710,7 +713,7 @@
 
   $ hg rebase --abort
   rebase aborted
-  $ hg rebase -s 8b6e13696 -d . --config experimental.copytrace.sourcecommitlimit=100
+  $ hg rebase -s 1 -d . --config experimental.copytrace.sourcecommitlimit=100
   rebasing 1:8b6e13696c38 "added more things to a"
   merging foo/bar and a to foo/bar
   saved backup bundle to $TESTTMP/repo/repo/repo/.hg/strip-backup/8b6e13696c38-fc14ac83-rebase.hg
--- a/tests/test-debugcommands.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-debugcommands.t	Mon May 20 11:40:47 2019 -0400
@@ -545,6 +545,7 @@
   .hg/cache/tags2
   .hg/cache/rbc-revs-v1
   .hg/cache/rbc-names-v1
+  .hg/cache/hgtagsfnodes1
   .hg/cache/branch2-served
 
 Test debugcolor
--- a/tests/test-double-merge.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-double-merge.t	Mon May 20 11:40:47 2019 -0400
@@ -26,7 +26,6 @@
   summary:     cp foo bar; change both
   
   $ hg merge --debug
-    searching for copies back to rev 1
     unmatched files in other:
      bar
     all copies found (* = to merge, ! = divergent, % = renamed and deleted):
--- a/tests/test-fastannotate-hg.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-fastannotate-hg.t	Mon May 20 11:40:47 2019 -0400
@@ -1,6 +1,8 @@
 (this file is backported from core hg tests/test-annotate.t)
 
   $ cat >> $HGRCPATH << EOF
+  > [ui]
+  > merge = :merge3
   > [diff]
   > git=1
   > [extensions]
@@ -11,8 +13,6 @@
   > mainbranch=.
   > EOF
 
-  $ HGMERGE=true; export HGMERGE
-
 init
 
   $ hg init repo
@@ -157,8 +157,34 @@
   created new head
   $ hg merge
   merging b
-  0 files updated, 1 files merged, 0 files removed, 0 files unresolved
-  (branch merge, don't forget to commit)
+  warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
+  0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+  [1]
+  $ cat b
+  a
+  a
+  a
+  <<<<<<< working copy: 5fbdc1152d97 - test: b2.1
+  b4
+  c
+  b5
+  ||||||| base
+  =======
+  b4
+  b5
+  b6
+  >>>>>>> merge rev:    37ec9f5c3d1f - test: b2
+  $ cat <<EOF > b
+  > a
+  > a
+  > a
+  > b4
+  > c
+  > b5
+  > EOF
+  $ hg resolve --mark -q
+  $ rm b.orig
   $ hg ci -mmergeb -d '3 0'
 
 annotate after merge
@@ -247,15 +273,31 @@
   > EOF
   $ hg ci -mc -d '3 0'
   created new head
+Work around the pure version not resolving the conflict like native code
+#if pure
+  $ hg merge
+  merging b
+  warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
+  0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+  [1]
+  $ cat <<EOF > b
+  > a
+  > z
+  > a
+  > b4
+  > c
+  > b5
+  > EOF
+  $ hg resolve -m b
+  (no more unresolved files)
+  $ rm b.orig
+#else
   $ hg merge
   merging b
   0 files updated, 1 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
-  $ cat <<EOF >> b
-  > b4
-  > c
-  > b5
-  > EOF
+#endif
   $ echo d >> b
   $ hg ci -mmerge2 -d '4 0'
 
@@ -745,6 +787,19 @@
   $ echo 3 >> a
   $ hg commit -m 3 -q
   $ hg merge 2 -q
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+  [1]
+  $ cat a
+  <<<<<<< working copy: 0a068f0261cf - test: 3
+  1
+  2
+  3
+  ||||||| base
+  1
+  2
+  =======
+  a
+  >>>>>>> merge rev:    9409851bc20a - test: a
   $ cat > a << EOF
   > b
   > 1
@@ -753,6 +808,7 @@
   > a
   > EOF
   $ hg resolve --mark -q
+  $ rm a.orig
   $ hg commit -m m
   $ hg annotate a
   4: b
--- a/tests/test-fastannotate-perfhack.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-fastannotate-perfhack.t	Mon May 20 11:40:47 2019 -0400
@@ -5,8 +5,6 @@
   > perfhack=1
   > EOF
 
-  $ HGMERGE=true; export HGMERGE
-
   $ hg init repo
   $ cd repo
 
--- a/tests/test-fastannotate-protocol.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-fastannotate-protocol.t	Mon May 20 11:40:47 2019 -0400
@@ -7,8 +7,6 @@
   > mainbranch=@
   > EOF
 
-  $ HGMERGE=true; export HGMERGE
-
 setup the server repo
 
   $ hg init repo-server
--- a/tests/test-fastannotate.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-fastannotate.t	Mon May 20 11:40:47 2019 -0400
@@ -3,8 +3,6 @@
   > fastannotate=
   > EOF
 
-  $ HGMERGE=true; export HGMERGE
-
   $ hg init repo
   $ cd repo
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-fix-metadata.t	Mon May 20 11:40:47 2019 -0400
@@ -0,0 +1,86 @@
+A python hook for "hg fix" that prints out the number of files and revisions
+that were affected, along with which fixer tools were applied. Also checks how
+many times it sees a specific key generated by one of the fixer tools defined
+below.
+
+  $ cat >> $TESTTMP/postfixhook.py <<EOF
+  > import collections
+  > def file(ui, repo, rev=None, path='', metadata=None, **kwargs):
+  >   ui.status('fixed %s in revision %d using %s\n' %
+  >             (path, rev, ', '.join(metadata.keys())))
+  > def summarize(ui, repo, replacements=None, wdirwritten=False,
+  >               metadata=None, **kwargs):
+  >     counts = collections.defaultdict(int)
+  >     keys = 0
+  >     for fixername, metadatalist in metadata.items():
+  >         for metadata in metadatalist:
+  >             if metadata is None:
+  >                 continue
+  >             counts[fixername] += 1
+  >             if 'key' in metadata:
+  >                 keys += 1
+  >     ui.status('saw "key" %d times\n' % (keys,))
+  >     for name, count in sorted(counts.items()):
+  >         ui.status('fixed %d files with %s\n' % (count, name))
+  >     if replacements:
+  >         ui.status('fixed %d revisions\n' % (len(replacements),))
+  >     if wdirwritten:
+  >         ui.status('fixed the working copy\n')
+  > EOF
+
+Some mock outputs for fixer tools that demonstrate what could go wrong when
+producing the expected metadata output format.
+
+  $ printf 'new content\n' > $TESTTMP/missing
+  $ printf 'not valid json\0new content\n' > $TESTTMP/invalid
+  $ printf '{"key": "value"}\0new content\n' > $TESTTMP/valid
+
+Configure some fixer tools based on the output defined above, and enable the
+hooks defined above. Disable parallelism to make output of the parallel file
+processing phase stable.
+
+  $ cat >> $HGRCPATH <<EOF
+  > [extensions]
+  > fix =
+  > [fix]
+  > missing:command=cat $TESTTMP/missing
+  > missing:pattern=missing
+  > missing:metadata=true
+  > invalid:command=cat $TESTTMP/invalid
+  > invalid:pattern=invalid
+  > invalid:metadata=true
+  > valid:command=cat $TESTTMP/valid
+  > valid:pattern=valid
+  > valid:metadata=true
+  > [hooks]
+  > postfixfile = python:$TESTTMP/postfixhook.py:file
+  > postfix = python:$TESTTMP/postfixhook.py:summarize
+  > [worker]
+  > enabled=false
+  > EOF
+
+See what happens when we execute each of the fixer tools. Some print warnings,
+some write back to the file.
+
+  $ hg init repo
+  $ cd repo
+
+  $ printf "old content\n" > invalid
+  $ printf "old content\n" > missing
+  $ printf "old content\n" > valid
+  $ hg add -q
+
+  $ hg fix -w
+  ignored invalid output from fixer tool: invalid
+  ignored invalid output from fixer tool: missing
+  fixed valid in revision 2147483647 using valid
+  saw "key" 1 times
+  fixed 1 files with valid
+  fixed the working copy
+
+  $ cat missing invalid valid
+  old content
+  old content
+  new content
+
+  $ cd ..
--- a/tests/test-fix.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-fix.t	Mon May 20 11:40:47 2019 -0400
@@ -185,6 +185,36 @@
   tool may see different values for the arguments added by the :linerange
   suboption.
   
+  Each fixer tool is allowed to return some metadata in addition to the fixed
+  file content. The metadata must be placed before the file content on stdout,
+  separated from the file content by a zero byte. The metadata is parsed as a
+  JSON value (so, it should be UTF-8 encoded and contain no zero bytes). A fixer
+  tool is expected to produce this metadata encoding if and only if the
+  :metadata suboption is true:
+  
+    [fix]
+    tool:command = tool --prepend-json-metadata
+    tool:metadata = true
+  
+  The metadata values are passed to hooks, which can be used to print summaries
+  or perform other post-fixing work. The supported hooks are:
+  
+    "postfixfile"
+      Run once for each file in each revision where any fixer tools made changes
+      to the file content. Provides "$HG_REV" and "$HG_PATH" to identify the file,
+      and "$HG_METADATA" with a map of fixer names to metadata values from fixer
+      tools that affected the file. Fixer tools that didn't affect the file have a
+      value of None. Only fixer tools that executed are present in the metadata.
+  
+    "postfix"
+      Run once after all files and revisions have been handled. Provides
+      "$HG_REPLACEMENTS" with information about what revisions were created and
+      made obsolete. Provides a boolean "$HG_WDIRWRITTEN" to indicate whether any
+      files in the working copy were updated. Provides a list "$HG_METADATA"
+      mapping fixer tool names to lists of metadata values returned from
+      executions that modified a file. This aggregates the same metadata
+      previously passed to the "postfixfile" hook.
+  
   list of commands:
   
    fix           rewrite file content in changesets or working directory
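
To make the metadata protocol documented above concrete, the following is a
rough sketch (not part of this changeset; the script name and metadata fields
are purely illustrative) of a fixer tool that conforms to the
"tool:metadata = true" contract: it reads the file content on stdin and writes
a JSON object, a zero byte, then the fixed content on stdout.

  #!/usr/bin/env python3
  # Hypothetical metadata-emitting fixer (illustrative only).
  # Output layout: <JSON metadata> NUL <fixed file content>.
  import json
  import sys

  def main():
      data = sys.stdin.buffer.read()
      # Stand-in "fix": replace some text in the incoming content.
      fixed = data.replace(b'old content', b'new content')
      metadata = {'key': 'value', 'changed': fixed != data}
      out = sys.stdout.buffer
      out.write(json.dumps(metadata).encode('utf-8'))
      out.write(b'\0')
      out.write(fixed)

  if __name__ == '__main__':
      main()

Such a tool would be wired up with something along the lines of
"fixer:command = python3 fixer.py" and "fixer:metadata = true" in the [fix]
section, mirroring the configuration used in tests/test-fix-metadata.t above.
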
--- a/tests/test-glog-topological.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-glog-topological.t	Mon May 20 11:40:47 2019 -0400
@@ -114,3 +114,41 @@
   |/
   o  0
   
+
+Topological sort can be turned on via config
+
+  $ cat >> $HGRCPATH << EOF
+  > [experimental]
+  > log.topo=true
+  > EOF
+
+  $ hg log -G
+  o  8
+  |
+  o  3
+  |
+  o  2
+  |
+  o  1
+  |
+  | o  7
+  | |
+  | o  6
+  | |
+  | o  5
+  | |
+  | o  4
+  |/
+  o  0
+  
+Does not affect non-graph log
+  $ hg log -T '{rev}\n'
+  8
+  7
+  6
+  5
+  4
+  3
+  2
+  1
+  0
--- a/tests/test-glog.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-glog.t	Mon May 20 11:40:47 2019 -0400
@@ -3028,12 +3028,14 @@
        date:        Thu Jan 01 00:00:04 1970 +0000
        summary:     (4) merge two known; one immediate left, one immediate right
   
-Draw only part of a grandparent line differently with "<N><char>"; only the
-last N lines (for positive N) or everything but the first N lines (for
-negative N) along the current node use the style, the rest of the edge uses
-the parent edge styling.
+Previously, one could specify graphstyle.grandparent = <N><char> to draw <char>
+on only the last N lines (for positive N) or everything but the first N lines
+(for negative N), with the rest of the edge using the parent edge styling.
 
-Last 3 lines:
+This was removed, and this test now shows that multiple characters being
+specified in graphstyle.grandparent aren't treated specially (including in width
+calculations; there's no specific reason to *avoid* handling the width
+calculations, but it's difficult to do correctly and efficiently).
 
   $ cat << EOF >> $HGRCPATH
   > [experimental]
@@ -3043,77 +3045,77 @@
   > EOF
   $ hg log -G -r '36:18 & file("a")' -m
   @  changeset:   36:08a19a744424
-  !  branch:      branch
-  !  tag:         tip
-  !  parent:      35:9159c3644c5e
-  !  parent:      35:9159c3644c5e
-  !  user:        test
-  .  date:        Thu Jan 01 00:00:36 1970 +0000
-  .  summary:     (36) buggy merge: identical parents
-  .
+  3.  branch:      branch
+  3.  tag:         tip
+  3.  parent:      35:9159c3644c5e
+  3.  parent:      35:9159c3644c5e
+  3.  user:        test
+  3.  date:        Thu Jan 01 00:00:36 1970 +0000
+  3.  summary:     (36) buggy merge: identical parents
+  3.
   o    changeset:   32:d06dffa21a31
   !\   parent:      27:886ed638191b
-  ! !  parent:      31:621d83e11f67
-  ! !  user:        test
-  ! .  date:        Thu Jan 01 00:00:32 1970 +0000
-  ! .  summary:     (32) expand
-  ! .
-  o !  changeset:   31:621d83e11f67
-  !\!  parent:      21:d42a756af44d
-  ! !  parent:      30:6e11cd4b648f
-  ! !  user:        test
-  ! !  date:        Thu Jan 01 00:00:31 1970 +0000
-  ! !  summary:     (31) expand
-  ! !
-  o !    changeset:   30:6e11cd4b648f
+  ! 3.  parent:      31:621d83e11f67
+  ! 3.  user:        test
+  ! 3.  date:        Thu Jan 01 00:00:32 1970 +0000
+  ! 3.  summary:     (32) expand
+  ! 3.
+  o 3.  changeset:   31:621d83e11f67
+  !\3.  parent:      21:d42a756af44d
+  ! 3.  parent:      30:6e11cd4b648f
+  ! 3.  user:        test
+  ! 3.  date:        Thu Jan 01 00:00:31 1970 +0000
+  ! 3.  summary:     (31) expand
+  ! 3.
+  o 3.   changeset:   30:6e11cd4b648f
   !\ \   parent:      28:44ecd0b9ae99
-  ! ~ !  parent:      29:cd9bb2be7593
-  !   !  user:        test
-  !   !  date:        Thu Jan 01 00:00:30 1970 +0000
-  !   !  summary:     (30) expand
+  ! ~ 3.  parent:      29:cd9bb2be7593
+  !   3.  user:        test
+  !   3.  date:        Thu Jan 01 00:00:30 1970 +0000
+  !   3.  summary:     (30) expand
   !  /
-  o !    changeset:   28:44ecd0b9ae99
+  o 3.   changeset:   28:44ecd0b9ae99
   !\ \   parent:      1:6db2ef61d156
-  ! ~ !  parent:      26:7f25b6c2f0b9
-  !   !  user:        test
-  !   !  date:        Thu Jan 01 00:00:28 1970 +0000
-  !   !  summary:     (28) merge zero known
+  ! ~ 3.  parent:      26:7f25b6c2f0b9
+  !   3.  user:        test
+  !   3.  date:        Thu Jan 01 00:00:28 1970 +0000
+  !   3.  summary:     (28) merge zero known
   !  /
-  o !    changeset:   26:7f25b6c2f0b9
+  o 3.   changeset:   26:7f25b6c2f0b9
   !\ \   parent:      18:1aa84d96232a
-  ! ! !  parent:      25:91da8ed57247
-  ! ! !  user:        test
-  ! ! !  date:        Thu Jan 01 00:00:26 1970 +0000
-  ! ! !  summary:     (26) merge one known; far right
-  ! ! !
-  ! o !  changeset:   25:91da8ed57247
-  ! !\!  parent:      21:d42a756af44d
-  ! ! !  parent:      24:a9c19a3d96b7
-  ! ! !  user:        test
-  ! ! !  date:        Thu Jan 01 00:00:25 1970 +0000
-  ! ! !  summary:     (25) merge one known; far left
-  ! ! !
-  ! o !    changeset:   24:a9c19a3d96b7
+  ! ! 3.  parent:      25:91da8ed57247
+  ! ! 3.  user:        test
+  ! ! 3.  date:        Thu Jan 01 00:00:26 1970 +0000
+  ! ! 3.  summary:     (26) merge one known; far right
+  ! ! 3.
+  ! o 3.  changeset:   25:91da8ed57247
+  ! !\3.  parent:      21:d42a756af44d
+  ! ! 3.  parent:      24:a9c19a3d96b7
+  ! ! 3.  user:        test
+  ! ! 3.  date:        Thu Jan 01 00:00:25 1970 +0000
+  ! ! 3.  summary:     (25) merge one known; far left
+  ! ! 3.
+  ! o 3.   changeset:   24:a9c19a3d96b7
   ! !\ \   parent:      0:e6eb3150255d
-  ! ! ~ !  parent:      23:a01cddf0766d
-  ! !   !  user:        test
-  ! !   !  date:        Thu Jan 01 00:00:24 1970 +0000
-  ! !   !  summary:     (24) merge one known; immediate right
+  ! ! ~ 3.  parent:      23:a01cddf0766d
+  ! !   3.  user:        test
+  ! !   3.  date:        Thu Jan 01 00:00:24 1970 +0000
+  ! !   3.  summary:     (24) merge one known; immediate right
   ! !  /
-  ! o !    changeset:   23:a01cddf0766d
+  ! o 3.   changeset:   23:a01cddf0766d
   ! !\ \   parent:      1:6db2ef61d156
-  ! ! ~ !  parent:      22:e0d9cccacb5d
-  ! !   !  user:        test
-  ! !   !  date:        Thu Jan 01 00:00:23 1970 +0000
-  ! !   !  summary:     (23) merge one known; immediate left
+  ! ! ~ 3.  parent:      22:e0d9cccacb5d
+  ! !   3.  user:        test
+  ! !   3.  date:        Thu Jan 01 00:00:23 1970 +0000
+  ! !   3.  summary:     (23) merge one known; immediate left
   ! !  /
-  ! o !  changeset:   22:e0d9cccacb5d
-  !/!/   parent:      18:1aa84d96232a
-  ! !    parent:      21:d42a756af44d
-  ! !    user:        test
-  ! !    date:        Thu Jan 01 00:00:22 1970 +0000
-  ! !    summary:     (22) merge two known; one far left, one far right
-  ! !
+  ! o 3.  changeset:   22:e0d9cccacb5d
+  !/3./  parent:      18:1aa84d96232a
+  ! 3.   parent:      21:d42a756af44d
+  ! 3.   user:        test
+  ! 3.   date:        Thu Jan 01 00:00:22 1970 +0000
+  ! 3.   summary:     (22) merge two known; one far left, one far right
+  ! 3.
   ! o    changeset:   21:d42a756af44d
   ! !\   parent:      19:31ddc2c1573b
   ! ! !  parent:      20:d30ed6450e32
@@ -3142,7 +3144,8 @@
        date:        Thu Jan 01 00:00:18 1970 +0000
        summary:     (18) merge two known; two far left
   
-All but the first 3 lines:
+(This formerly tested "All but the first 3 lines", but now shows that it's
+still not treated any differently):
 
   $ cat << EOF >> $HGRCPATH
   > [experimental]
@@ -3152,77 +3155,77 @@
   > EOF
   $ hg log -G -r '36:18 & file("a")' -m
   @  changeset:   36:08a19a744424
-  !  branch:      branch
-  !  tag:         tip
-  .  parent:      35:9159c3644c5e
-  .  parent:      35:9159c3644c5e
-  .  user:        test
-  .  date:        Thu Jan 01 00:00:36 1970 +0000
-  .  summary:     (36) buggy merge: identical parents
-  .
+  -3.  branch:      branch
+  -3.  tag:         tip
+  -3.  parent:      35:9159c3644c5e
+  -3.  parent:      35:9159c3644c5e
+  -3.  user:        test
+  -3.  date:        Thu Jan 01 00:00:36 1970 +0000
+  -3.  summary:     (36) buggy merge: identical parents
+  -3.
   o    changeset:   32:d06dffa21a31
   !\   parent:      27:886ed638191b
-  ! !  parent:      31:621d83e11f67
-  ! .  user:        test
-  ! .  date:        Thu Jan 01 00:00:32 1970 +0000
-  ! .  summary:     (32) expand
-  ! .
-  o !  changeset:   31:621d83e11f67
-  !\!  parent:      21:d42a756af44d
-  ! !  parent:      30:6e11cd4b648f
-  ! !  user:        test
-  ! !  date:        Thu Jan 01 00:00:31 1970 +0000
-  ! !  summary:     (31) expand
-  ! !
-  o !    changeset:   30:6e11cd4b648f
+  ! -3.  parent:      31:621d83e11f67
+  ! -3.  user:        test
+  ! -3.  date:        Thu Jan 01 00:00:32 1970 +0000
+  ! -3.  summary:     (32) expand
+  ! -3.
+  o -3.  changeset:   31:621d83e11f67
+  !\-3.  parent:      21:d42a756af44d
+  ! -3.  parent:      30:6e11cd4b648f
+  ! -3.  user:        test
+  ! -3.  date:        Thu Jan 01 00:00:31 1970 +0000
+  ! -3.  summary:     (31) expand
+  ! -3.
+  o -3.  changeset:   30:6e11cd4b648f
   !\ \   parent:      28:44ecd0b9ae99
-  ! ~ !  parent:      29:cd9bb2be7593
-  !   !  user:        test
-  !   !  date:        Thu Jan 01 00:00:30 1970 +0000
-  !   !  summary:     (30) expand
+  ! ~ -3.  parent:      29:cd9bb2be7593
+  !   -3.  user:        test
+  !   -3.  date:        Thu Jan 01 00:00:30 1970 +0000
+  !   -3.  summary:     (30) expand
   !  /
-  o !    changeset:   28:44ecd0b9ae99
+  o -3.  changeset:   28:44ecd0b9ae99
   !\ \   parent:      1:6db2ef61d156
-  ! ~ !  parent:      26:7f25b6c2f0b9
-  !   !  user:        test
-  !   !  date:        Thu Jan 01 00:00:28 1970 +0000
-  !   !  summary:     (28) merge zero known
+  ! ~ -3.  parent:      26:7f25b6c2f0b9
+  !   -3.  user:        test
+  !   -3.  date:        Thu Jan 01 00:00:28 1970 +0000
+  !   -3.  summary:     (28) merge zero known
   !  /
-  o !    changeset:   26:7f25b6c2f0b9
+  o -3.  changeset:   26:7f25b6c2f0b9
   !\ \   parent:      18:1aa84d96232a
-  ! ! !  parent:      25:91da8ed57247
-  ! ! !  user:        test
-  ! ! !  date:        Thu Jan 01 00:00:26 1970 +0000
-  ! ! !  summary:     (26) merge one known; far right
-  ! ! !
-  ! o !  changeset:   25:91da8ed57247
-  ! !\!  parent:      21:d42a756af44d
-  ! ! !  parent:      24:a9c19a3d96b7
-  ! ! !  user:        test
-  ! ! !  date:        Thu Jan 01 00:00:25 1970 +0000
-  ! ! !  summary:     (25) merge one known; far left
-  ! ! !
-  ! o !    changeset:   24:a9c19a3d96b7
+  ! ! -3.  parent:      25:91da8ed57247
+  ! ! -3.  user:        test
+  ! ! -3.  date:        Thu Jan 01 00:00:26 1970 +0000
+  ! ! -3.  summary:     (26) merge one known; far right
+  ! ! -3.
+  ! o -3.  changeset:   25:91da8ed57247
+  ! !\-3.  parent:      21:d42a756af44d
+  ! ! -3.  parent:      24:a9c19a3d96b7
+  ! ! -3.  user:        test
+  ! ! -3.  date:        Thu Jan 01 00:00:25 1970 +0000
+  ! ! -3.  summary:     (25) merge one known; far left
+  ! ! -3.
+  ! o -3.  changeset:   24:a9c19a3d96b7
   ! !\ \   parent:      0:e6eb3150255d
-  ! ! ~ !  parent:      23:a01cddf0766d
-  ! !   !  user:        test
-  ! !   !  date:        Thu Jan 01 00:00:24 1970 +0000
-  ! !   !  summary:     (24) merge one known; immediate right
+  ! ! ~ -3.  parent:      23:a01cddf0766d
+  ! !   -3.  user:        test
+  ! !   -3.  date:        Thu Jan 01 00:00:24 1970 +0000
+  ! !   -3.  summary:     (24) merge one known; immediate right
   ! !  /
-  ! o !    changeset:   23:a01cddf0766d
+  ! o -3.  changeset:   23:a01cddf0766d
   ! !\ \   parent:      1:6db2ef61d156
-  ! ! ~ !  parent:      22:e0d9cccacb5d
-  ! !   !  user:        test
-  ! !   !  date:        Thu Jan 01 00:00:23 1970 +0000
-  ! !   !  summary:     (23) merge one known; immediate left
+  ! ! ~ -3.  parent:      22:e0d9cccacb5d
+  ! !   -3.  user:        test
+  ! !   -3.  date:        Thu Jan 01 00:00:23 1970 +0000
+  ! !   -3.  summary:     (23) merge one known; immediate left
   ! !  /
-  ! o !  changeset:   22:e0d9cccacb5d
-  !/!/   parent:      18:1aa84d96232a
-  ! !    parent:      21:d42a756af44d
-  ! !    user:        test
-  ! !    date:        Thu Jan 01 00:00:22 1970 +0000
-  ! !    summary:     (22) merge two known; one far left, one far right
-  ! !
+  ! o -3.  changeset:   22:e0d9cccacb5d
+  !/-3./  parent:      18:1aa84d96232a
+  ! -3.  parent:      21:d42a756af44d
+  ! -3.  user:        test
+  ! -3.  date:        Thu Jan 01 00:00:22 1970 +0000
+  ! -3.  summary:     (22) merge two known; one far left, one far right
+  ! -3.
   ! o    changeset:   21:d42a756af44d
   ! !\   parent:      19:31ddc2c1573b
   ! ! !  parent:      20:d30ed6450e32
--- a/tests/test-graft.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-graft.t	Mon May 20 11:40:47 2019 -0400
@@ -75,6 +75,8 @@
 
   $ hg graft -r 2 --base 3
   grafting 2:5c095ad7e90f "2"
+  note: possible conflict - c was deleted and renamed to:
+   a
   note: graft of 2:5c095ad7e90f created no changes to commit
 
 Can't continue without starting:
@@ -199,7 +201,6 @@
   scanning for duplicate grafts
   skipping revision 2:5c095ad7e90f (already grafted to 7:ef0ef43d49e7)
   grafting 1:5d205f8b35b6 "1"
-    searching for copies back to rev 1
     unmatched files in local:
      b
     all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -221,9 +222,9 @@
   committing changelog
   updating the branch cache
   grafting 5:97f8bfe72746 "5"
-    searching for copies back to rev 1
-    unmatched files in other (from topological common ancestor):
-     c
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+     src: 'c' -> dst: 'b' 
+    checking for directory renames
   resolving manifests
    branchmerge: True, force: True, partial: False
    ancestor: 4c60f11aa304, local: 6b9e5368ca4e+, remote: 97f8bfe72746
@@ -237,9 +238,9 @@
   $ HGEDITOR=cat hg graft 4 3 --log --debug
   scanning for duplicate grafts
   grafting 4:9c233e8e184d "4"
-    searching for copies back to rev 1
-    unmatched files in other (from topological common ancestor):
-     c
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+     src: 'c' -> dst: 'b' 
+    checking for directory renames
   resolving manifests
    branchmerge: True, force: True, partial: False
    ancestor: 4c60f11aa304, local: 1905859650ec+, remote: 9c233e8e184d
@@ -744,11 +745,9 @@
   $ hg graft -q 13 --debug
   scanning for duplicate grafts
   grafting 13:7a4785234d87 "2"
-    searching for copies back to rev 12
-    unmatched files in other (from topological common ancestor):
-     g
-    unmatched files new in both:
-     b
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+     src: 'a' -> dst: 'b' *
+    checking for directory renames
   resolving manifests
    branchmerge: True, force: True, partial: False
    ancestor: b592ea63bb0c, local: 7e61b508e709+, remote: 7a4785234d87
@@ -756,7 +755,7 @@
   committing files:
   b
   warning: can't find ancestor for 'b' copied from 'a'!
-  reusing manifest form p1 (listed files actually unchanged)
+  reusing manifest from p1 (listed files actually unchanged)
   committing changelog
   updating the branch cache
   $ hg log -r 'destination(13)'
@@ -969,7 +968,6 @@
 A.4 has a degenerate case a<-b<-a->a, where checkcopies isn't needed at all.
 A.5 has a special case a<-b<-b->a, which is treated like a<-b->a in a merge.
 A.5 has issue5343 as a special case.
-TODO: add test coverage for A.5
 A.6 has a special case a<-a<-b->a. Here, checkcopies will find a spurious
 incomplete divergence, which is in fact complete. This is handled later in
 mergecopies.
@@ -1072,13 +1070,32 @@
   $ hg mv f4a f4e
   $ hg mv f5a f5b
   $ hg ci -qAm "E0"
+  $ hg up -q "min(desc("A0"))"
+  $ hg cp f1a f1f
+  $ hg ci -qAm "F0"
+  $ hg up -q "min(desc("A0"))"
+  $ hg cp f1a f1g
+  $ echo c1g > f1g
+  $ hg ci -qAm "G0"
   $ hg log -G
-  @  changeset:   6:6bd1736cab86
+  @  changeset:   8:ba67f08fb15a
   |  tag:         tip
   |  parent:      0:11f7a1b56675
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
-  |  summary:     E0
+  |  summary:     G0
+  |
+  | o  changeset:   7:d376ab0d7fda
+  |/   parent:      0:11f7a1b56675
+  |    user:        test
+  |    date:        Thu Jan 01 00:00:00 1970 +0000
+  |    summary:     F0
+  |
+  | o  changeset:   6:6bd1736cab86
+  |/   parent:      0:11f7a1b56675
+  |    user:        test
+  |    date:        Thu Jan 01 00:00:00 1970 +0000
+  |    summary:     E0
   |
   | o  changeset:   5:560daee679da
   | |  user:        test
@@ -1115,11 +1132,11 @@
 Test the cases A.4 (f1x), the "ping-pong" special case of A.7 (f5x),
 and A.3 with a local content change to be preserved (f2x).
 
+  $ hg up -q "desc("E0")"
   $ HGEDITOR="echo C2 >" hg graft -r 'desc("C0")' --edit
   grafting 2:f58c7e2b28fa "C0"
   merging f1e and f1b to f1e
   merging f2a and f2c to f2c
-  merging f5b and f5a to f5a
 
 Test the cases A.1 (f4x) and A.7 (f3x).
 
@@ -1131,93 +1148,129 @@
   merging f4e and f4a to f4e
   warning: can't find ancestor for 'f3d' copied from 'f3b'!
 
+  $ hg cat f2c
+  c2e
+
+Test the case A.5 (move case, f1x).
+
+  $ hg up -q "desc("C0")"
+BROKEN: Shouldn't get the warning about missing ancestor
+  $ HGEDITOR="echo E1 >" hg graft -r 'desc("E0")' --edit
+  grafting 6:6bd1736cab86 "E0"
+  note: possible conflict - f1a was renamed multiple times to:
+   f1b
+   f1e
+  note: possible conflict - f3a was renamed multiple times to:
+   f3b
+   f3e
+  merging f2c and f2a to f2c
+  merging f5a and f5b to f5b
+  warning: can't find ancestor for 'f1e' copied from 'f1a'!
+  warning: can't find ancestor for 'f3e' copied from 'f3a'!
+  $ cat f1e
+  c1a
+
+Test the case A.5 (copy case, f1x).
+
+  $ hg up -q "desc("C0")"
+BROKEN: Shouldn't get the warning about missing ancestor
+  $ HGEDITOR="echo F1 >" hg graft -r 'desc("F0")' --edit
+  grafting 7:d376ab0d7fda "F0"
+  warning: can't find ancestor for 'f1f' copied from 'f1a'!
+BROKEN: f1f should be marked a copy from f1b
+  $ hg st --copies --change .
+  A f1f
+BROKEN: f1f should have the new content from f1b (i.e. "c1c")
+  $ cat f1f
+  c1a
+
+Test the case A.5 (copy+modify case, f1x).
+
+  $ hg up -q "desc("C0")"
+BROKEN: We should get a merge conflict from the 3-way merge between f1b in C0
+(content "c1c") and f1g in G0 (content "c1g") with f1a in A0 as base (content
+"c1a")
+  $ HGEDITOR="echo G1 >" hg graft -r 'desc("G0")' --edit
+  grafting 8:ba67f08fb15a "G0"
+  warning: can't find ancestor for 'f1g' copied from 'f1a'!
+
 Check the results of the grafts tested
 
   $ hg log -CGv --patch --git
-  @  changeset:   8:93ee502e8b0a
+  @  changeset:   13:ef3adf6c20a4
   |  tag:         tip
+  |  parent:      2:f58c7e2b28fa
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
-  |  files:       f3d f4e
+  |  files:       f1g
   |  description:
-  |  D2
+  |  G1
   |
   |
-  |  diff --git a/f3d b/f3d
+  |  diff --git a/f1g b/f1g
   |  new file mode 100644
   |  --- /dev/null
-  |  +++ b/f3d
+  |  +++ b/f1g
   |  @@ -0,0 +1,1 @@
-  |  +c3a
-  |  diff --git a/f4e b/f4e
-  |  --- a/f4e
-  |  +++ b/f4e
-  |  @@ -1,1 +1,1 @@
-  |  -c4a
-  |  +c4d
+  |  +c1g
   |
-  o  changeset:   7:539cf145f496
-  |  user:        test
-  |  date:        Thu Jan 01 00:00:00 1970 +0000
-  |  files:       f1e f2a f2c f5a f5b
-  |  copies:      f2c (f2a) f5a (f5b)
-  |  description:
-  |  C2
+  | o  changeset:   12:b5542d755b54
+  |/   parent:      2:f58c7e2b28fa
+  |    user:        test
+  |    date:        Thu Jan 01 00:00:00 1970 +0000
+  |    files:       f1f
+  |    description:
+  |    F1
   |
   |
-  |  diff --git a/f1e b/f1e
-  |  --- a/f1e
-  |  +++ b/f1e
-  |  @@ -1,1 +1,1 @@
-  |  -c1a
-  |  +c1c
-  |  diff --git a/f2a b/f2c
-  |  rename from f2a
-  |  rename to f2c
-  |  diff --git a/f5b b/f5a
-  |  rename from f5b
-  |  rename to f5a
-  |  --- a/f5b
-  |  +++ b/f5a
-  |  @@ -1,1 +1,1 @@
-  |  -c5a
-  |  +c5c
+  |    diff --git a/f1f b/f1f
+  |    new file mode 100644
+  |    --- /dev/null
+  |    +++ b/f1f
+  |    @@ -0,0 +1,1 @@
+  |    +c1a
   |
-  o  changeset:   6:6bd1736cab86
-  |  parent:      0:11f7a1b56675
-  |  user:        test
-  |  date:        Thu Jan 01 00:00:00 1970 +0000
-  |  files:       f1a f1e f2a f3a f3e f4a f4e f5a f5b
-  |  copies:      f1e (f1a) f3e (f3a) f4e (f4a) f5b (f5a)
-  |  description:
-  |  E0
+  | o  changeset:   11:f8a162271246
+  |/   parent:      2:f58c7e2b28fa
+  |    user:        test
+  |    date:        Thu Jan 01 00:00:00 1970 +0000
+  |    files:       f1e f2c f3e f4a f4e f5a f5b
+  |    copies:      f4e (f4a) f5b (f5a)
+  |    description:
+  |    E1
   |
   |
-  |  diff --git a/f1a b/f1e
-  |  rename from f1a
-  |  rename to f1e
-  |  diff --git a/f2a b/f2a
-  |  --- a/f2a
-  |  +++ b/f2a
-  |  @@ -1,1 +1,1 @@
-  |  -c2a
-  |  +c2e
-  |  diff --git a/f3a b/f3e
-  |  rename from f3a
-  |  rename to f3e
-  |  diff --git a/f4a b/f4e
-  |  rename from f4a
-  |  rename to f4e
-  |  diff --git a/f5a b/f5b
-  |  rename from f5a
-  |  rename to f5b
+  |    diff --git a/f1e b/f1e
+  |    new file mode 100644
+  |    --- /dev/null
+  |    +++ b/f1e
+  |    @@ -0,0 +1,1 @@
+  |    +c1a
+  |    diff --git a/f2c b/f2c
+  |    --- a/f2c
+  |    +++ b/f2c
+  |    @@ -1,1 +1,1 @@
+  |    -c2a
+  |    +c2e
+  |    diff --git a/f3e b/f3e
+  |    new file mode 100644
+  |    --- /dev/null
+  |    +++ b/f3e
+  |    @@ -0,0 +1,1 @@
+  |    +c3a
+  |    diff --git a/f4a b/f4e
+  |    rename from f4a
+  |    rename to f4e
+  |    diff --git a/f5a b/f5b
+  |    rename from f5a
+  |    rename to f5b
   |
-  | o  changeset:   5:560daee679da
+  | o  changeset:   10:93ee502e8b0a
   | |  user:        test
   | |  date:        Thu Jan 01 00:00:00 1970 +0000
-  | |  files:       f3d f4a
+  | |  files:       f3d f4e
   | |  description:
-  | |  D1
+  | |  D2
   | |
   | |
   | |  diff --git a/f3d b/f3d
@@ -1226,59 +1279,170 @@
   | |  +++ b/f3d
   | |  @@ -0,0 +1,1 @@
   | |  +c3a
-  | |  diff --git a/f4a b/f4a
-  | |  --- a/f4a
-  | |  +++ b/f4a
+  | |  diff --git a/f4e b/f4e
+  | |  --- a/f4e
+  | |  +++ b/f4e
   | |  @@ -1,1 +1,1 @@
   | |  -c4a
   | |  +c4d
   | |
-  | o  changeset:   4:c9763722f9bd
-  |/   parent:      0:11f7a1b56675
-  |    user:        test
-  |    date:        Thu Jan 01 00:00:00 1970 +0000
-  |    files:       f1a f2a f2c f5a
-  |    copies:      f2c (f2a)
-  |    description:
-  |    C1
-  |
-  |
-  |    diff --git a/f1a b/f1a
-  |    --- a/f1a
-  |    +++ b/f1a
-  |    @@ -1,1 +1,1 @@
-  |    -c1a
-  |    +c1c
-  |    diff --git a/f2a b/f2c
-  |    rename from f2a
-  |    rename to f2c
-  |    diff --git a/f5a b/f5a
-  |    --- a/f5a
-  |    +++ b/f5a
-  |    @@ -1,1 +1,1 @@
-  |    -c5a
-  |    +c5c
-  |
-  | o  changeset:   3:b69f5839d2d9
+  | o  changeset:   9:539cf145f496
+  | |  parent:      6:6bd1736cab86
   | |  user:        test
   | |  date:        Thu Jan 01 00:00:00 1970 +0000
-  | |  files:       f3b f3d f4a
-  | |  copies:      f3d (f3b)
+  | |  files:       f1e f2a f2c f5a f5b
+  | |  copies:      f2c (f2a) f5a (f5b)
   | |  description:
-  | |  D0
+  | |  C2
+  | |
+  | |
+  | |  diff --git a/f1e b/f1e
+  | |  --- a/f1e
+  | |  +++ b/f1e
+  | |  @@ -1,1 +1,1 @@
+  | |  -c1a
+  | |  +c1c
+  | |  diff --git a/f2a b/f2c
+  | |  rename from f2a
+  | |  rename to f2c
+  | |  diff --git a/f5b b/f5a
+  | |  rename from f5b
+  | |  rename to f5a
+  | |  --- a/f5b
+  | |  +++ b/f5a
+  | |  @@ -1,1 +1,1 @@
+  | |  -c5a
+  | |  +c5c
+  | |
+  | | o  changeset:   8:ba67f08fb15a
+  | | |  parent:      0:11f7a1b56675
+  | | |  user:        test
+  | | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | | |  files:       f1g
+  | | |  copies:      f1g (f1a)
+  | | |  description:
+  | | |  G0
+  | | |
+  | | |
+  | | |  diff --git a/f1a b/f1g
+  | | |  copy from f1a
+  | | |  copy to f1g
+  | | |  --- a/f1a
+  | | |  +++ b/f1g
+  | | |  @@ -1,1 +1,1 @@
+  | | |  -c1a
+  | | |  +c1g
+  | | |
+  | | | o  changeset:   7:d376ab0d7fda
+  | | |/   parent:      0:11f7a1b56675
+  | | |    user:        test
+  | | |    date:        Thu Jan 01 00:00:00 1970 +0000
+  | | |    files:       f1f
+  | | |    copies:      f1f (f1a)
+  | | |    description:
+  | | |    F0
+  | | |
+  | | |
+  | | |    diff --git a/f1a b/f1f
+  | | |    copy from f1a
+  | | |    copy to f1f
+  | | |
+  | o |  changeset:   6:6bd1736cab86
+  | |/   parent:      0:11f7a1b56675
+  | |    user:        test
+  | |    date:        Thu Jan 01 00:00:00 1970 +0000
+  | |    files:       f1a f1e f2a f3a f3e f4a f4e f5a f5b
+  | |    copies:      f1e (f1a) f3e (f3a) f4e (f4a) f5b (f5a)
+  | |    description:
+  | |    E0
   | |
   | |
-  | |  diff --git a/f3b b/f3d
-  | |  rename from f3b
-  | |  rename to f3d
-  | |  diff --git a/f4a b/f4a
-  | |  --- a/f4a
-  | |  +++ b/f4a
-  | |  @@ -1,1 +1,1 @@
-  | |  -c4a
-  | |  +c4d
+  | |    diff --git a/f1a b/f1e
+  | |    rename from f1a
+  | |    rename to f1e
+  | |    diff --git a/f2a b/f2a
+  | |    --- a/f2a
+  | |    +++ b/f2a
+  | |    @@ -1,1 +1,1 @@
+  | |    -c2a
+  | |    +c2e
+  | |    diff --git a/f3a b/f3e
+  | |    rename from f3a
+  | |    rename to f3e
+  | |    diff --git a/f4a b/f4e
+  | |    rename from f4a
+  | |    rename to f4e
+  | |    diff --git a/f5a b/f5b
+  | |    rename from f5a
+  | |    rename to f5b
   | |
-  | o  changeset:   2:f58c7e2b28fa
+  | | o  changeset:   5:560daee679da
+  | | |  user:        test
+  | | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | | |  files:       f3d f4a
+  | | |  description:
+  | | |  D1
+  | | |
+  | | |
+  | | |  diff --git a/f3d b/f3d
+  | | |  new file mode 100644
+  | | |  --- /dev/null
+  | | |  +++ b/f3d
+  | | |  @@ -0,0 +1,1 @@
+  | | |  +c3a
+  | | |  diff --git a/f4a b/f4a
+  | | |  --- a/f4a
+  | | |  +++ b/f4a
+  | | |  @@ -1,1 +1,1 @@
+  | | |  -c4a
+  | | |  +c4d
+  | | |
+  | | o  changeset:   4:c9763722f9bd
+  | |/   parent:      0:11f7a1b56675
+  | |    user:        test
+  | |    date:        Thu Jan 01 00:00:00 1970 +0000
+  | |    files:       f1a f2a f2c f5a
+  | |    copies:      f2c (f2a)
+  | |    description:
+  | |    C1
+  | |
+  | |
+  | |    diff --git a/f1a b/f1a
+  | |    --- a/f1a
+  | |    +++ b/f1a
+  | |    @@ -1,1 +1,1 @@
+  | |    -c1a
+  | |    +c1c
+  | |    diff --git a/f2a b/f2c
+  | |    rename from f2a
+  | |    rename to f2c
+  | |    diff --git a/f5a b/f5a
+  | |    --- a/f5a
+  | |    +++ b/f5a
+  | |    @@ -1,1 +1,1 @@
+  | |    -c5a
+  | |    +c5c
+  | |
+  +---o  changeset:   3:b69f5839d2d9
+  | |    user:        test
+  | |    date:        Thu Jan 01 00:00:00 1970 +0000
+  | |    files:       f3b f3d f4a
+  | |    copies:      f3d (f3b)
+  | |    description:
+  | |    D0
+  | |
+  | |
+  | |    diff --git a/f3b b/f3d
+  | |    rename from f3b
+  | |    rename to f3d
+  | |    diff --git a/f4a b/f4a
+  | |    --- a/f4a
+  | |    +++ b/f4a
+  | |    @@ -1,1 +1,1 @@
+  | |    -c4a
+  | |    +c4d
+  | |
+  o |  changeset:   2:f58c7e2b28fa
   | |  user:        test
   | |  date:        Thu Jan 01 00:00:00 1970 +0000
   | |  files:       f1b f2a f2c f5a f5b
@@ -1305,7 +1469,7 @@
   | |  -c5a
   | |  +c5c
   | |
-  | o  changeset:   1:3d7bba921b5d
+  o |  changeset:   1:3d7bba921b5d
   |/   user:        test
   |    date:        Thu Jan 01 00:00:00 1970 +0000
   |    files:       f1a f1b f3a f3b f5a f5b
@@ -1363,9 +1527,6 @@
      @@ -0,0 +1,1 @@
      +c5a
   
-  $ hg cat f2c
-  c2e
-
 Check superfluous filemerge of files renamed in the past but untouched by graft
 
   $ echo a > a
--- a/tests/test-help.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-help.t	Mon May 20 11:40:47 2019 -0400
@@ -615,6 +615,8 @@
   
       Returns 0 on success, 1 if errors are encountered.
   
+  options:
+  
   (some details hidden, use --verbose to show complete help)
 
   $ hg help diff
@@ -1509,6 +1511,10 @@
   
       "sparse-revlog"
   
+      "revlog-compression"
+  
+      "bookmarks-in-store"
+  
       "profiling"
       -----------
   
--- a/tests/test-issue1802.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-issue1802.t	Mon May 20 11:40:47 2019 -0400
@@ -52,7 +52,6 @@
 Simulate a Windows merge:
 
   $ hg --config extensions.n=$TESTTMP/noexec.py merge --debug
-    searching for copies back to rev 1
     unmatched files in local:
      b
   resolving manifests
--- a/tests/test-issue522.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-issue522.t	Mon May 20 11:40:47 2019 -0400
@@ -25,7 +25,6 @@
   $ hg ci -qAm 'add bar'
 
   $ hg merge --debug
-    searching for copies back to rev 1
     unmatched files in local:
      bar
   resolving manifests
--- a/tests/test-issue672.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-issue672.t	Mon May 20 11:40:47 2019 -0400
@@ -25,7 +25,6 @@
   created new head
 
   $ hg merge --debug 1
-    searching for copies back to rev 1
     unmatched files in other:
      1a
     all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -54,7 +53,6 @@
   1 files updated, 0 files merged, 1 files removed, 0 files unresolved
 
   $ hg merge -y --debug 4
-    searching for copies back to rev 1
     unmatched files in local:
      1a
     all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -77,7 +75,6 @@
   1 files updated, 0 files merged, 1 files removed, 0 files unresolved
 
   $ hg merge -y --debug 3
-    searching for copies back to rev 1
     unmatched files in other:
      1a
     all copies found (* = to merge, ! = divergent, % = renamed and deleted):
--- a/tests/test-log.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-log.t	Mon May 20 11:40:47 2019 -0400
@@ -1081,7 +1081,7 @@
   $ cd ..
 
 log --follow --patch FILE in repository where linkrev isn't trustworthy
-(issue5376)
+(issue5376, issue6124)
 
   $ hg init follow-dup
   $ cd follow-dup
@@ -1129,6 +1129,16 @@
   @@ -0,0 +1,1 @@
   +0
   
+  $ hg log -pr . a
+  === 3: a3
+  diff -r 4ea02ba94d66 -r e7a6331a34f0 a
+  --- a/a
+  +++ b/a
+  @@ -1,2 +1,3 @@
+   0
+   1
+  +3
+  
 
  fctx.introrev() == 2, but fctx.linkrev() == 1
 
@@ -1150,6 +1160,9 @@
   +0
   
 
+BROKEN: should show the same diff as for rev 2 above
+  $ hg log -pr . a
+
   $ cd ..
 
 Multiple copy sources of a file:
--- a/tests/test-match.py	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-match.py	Mon May 20 11:40:47 2019 -0400
@@ -13,36 +13,36 @@
 
     def testVisitdir(self):
         m = matchmod.basematcher()
-        self.assertTrue(m.visitdir(b'.'))
+        self.assertTrue(m.visitdir(b''))
         self.assertTrue(m.visitdir(b'dir'))
 
     def testVisitchildrenset(self):
         m = matchmod.basematcher()
-        self.assertEqual(m.visitchildrenset(b'.'), b'this')
+        self.assertEqual(m.visitchildrenset(b''), b'this')
         self.assertEqual(m.visitchildrenset(b'dir'), b'this')
 
 class AlwaysMatcherTests(unittest.TestCase):
 
     def testVisitdir(self):
         m = matchmod.alwaysmatcher()
-        self.assertEqual(m.visitdir(b'.'), b'all')
+        self.assertEqual(m.visitdir(b''), b'all')
         self.assertEqual(m.visitdir(b'dir'), b'all')
 
     def testVisitchildrenset(self):
         m = matchmod.alwaysmatcher()
-        self.assertEqual(m.visitchildrenset(b'.'), b'all')
+        self.assertEqual(m.visitchildrenset(b''), b'all')
         self.assertEqual(m.visitchildrenset(b'dir'), b'all')
 
 class NeverMatcherTests(unittest.TestCase):
 
     def testVisitdir(self):
         m = matchmod.nevermatcher()
-        self.assertFalse(m.visitdir(b'.'))
+        self.assertFalse(m.visitdir(b''))
         self.assertFalse(m.visitdir(b'dir'))
 
     def testVisitchildrenset(self):
         m = matchmod.nevermatcher()
-        self.assertEqual(m.visitchildrenset(b'.'), set())
+        self.assertEqual(m.visitchildrenset(b''), set())
         self.assertEqual(m.visitchildrenset(b'dir'), set())
 
 class PredicateMatcherTests(unittest.TestCase):
@@ -51,12 +51,12 @@
 
     def testVisitdir(self):
         m = matchmod.predicatematcher(lambda *a: False)
-        self.assertTrue(m.visitdir(b'.'))
+        self.assertTrue(m.visitdir(b''))
         self.assertTrue(m.visitdir(b'dir'))
 
     def testVisitchildrenset(self):
         m = matchmod.predicatematcher(lambda *a: False)
-        self.assertEqual(m.visitchildrenset(b'.'), b'this')
+        self.assertEqual(m.visitchildrenset(b''), b'this')
         self.assertEqual(m.visitchildrenset(b'dir'), b'this')
 
 class PatternMatcherTests(unittest.TestCase):
@@ -64,7 +64,7 @@
     def testVisitdirPrefix(self):
         m = matchmod.match(b'x', b'', patterns=[b'path:dir/subdir'])
         assert isinstance(m, matchmod.patternmatcher)
-        self.assertTrue(m.visitdir(b'.'))
+        self.assertTrue(m.visitdir(b''))
         self.assertTrue(m.visitdir(b'dir'))
         self.assertEqual(m.visitdir(b'dir/subdir'), b'all')
         # OPT: This should probably be 'all' if its parent is?
@@ -74,7 +74,7 @@
     def testVisitchildrensetPrefix(self):
         m = matchmod.match(b'x', b'', patterns=[b'path:dir/subdir'])
         assert isinstance(m, matchmod.patternmatcher)
-        self.assertEqual(m.visitchildrenset(b'.'), b'this')
+        self.assertEqual(m.visitchildrenset(b''), b'this')
         self.assertEqual(m.visitchildrenset(b'dir'), b'this')
         self.assertEqual(m.visitchildrenset(b'dir/subdir'), b'all')
         # OPT: This should probably be 'all' if its parent is?
@@ -84,28 +84,28 @@
     def testVisitdirRootfilesin(self):
         m = matchmod.match(b'x', b'', patterns=[b'rootfilesin:dir/subdir'])
         assert isinstance(m, matchmod.patternmatcher)
-        self.assertTrue(m.visitdir(b'.'))
         self.assertFalse(m.visitdir(b'dir/subdir/x'))
         self.assertFalse(m.visitdir(b'folder'))
         # FIXME: These should probably be True.
+        self.assertFalse(m.visitdir(b''))
         self.assertFalse(m.visitdir(b'dir'))
         self.assertFalse(m.visitdir(b'dir/subdir'))
 
     def testVisitchildrensetRootfilesin(self):
         m = matchmod.match(b'x', b'', patterns=[b'rootfilesin:dir/subdir'])
         assert isinstance(m, matchmod.patternmatcher)
-        self.assertEqual(m.visitchildrenset(b'.'), b'this')
         self.assertEqual(m.visitchildrenset(b'dir/subdir/x'), set())
         self.assertEqual(m.visitchildrenset(b'folder'), set())
-        # FIXME: These should probably be {'subdir'} and 'this', respectively,
-        # or at least 'this' and 'this'.
+        # FIXME: These should probably be {'dir'}, {'subdir'} and 'this',
+        # respectively, or at least 'this' for all three.
+        self.assertEqual(m.visitchildrenset(b''), set())
         self.assertEqual(m.visitchildrenset(b'dir'), set())
         self.assertEqual(m.visitchildrenset(b'dir/subdir'), set())
 
     def testVisitdirGlob(self):
         m = matchmod.match(b'x', b'', patterns=[b'glob:dir/z*'])
         assert isinstance(m, matchmod.patternmatcher)
-        self.assertTrue(m.visitdir(b'.'))
+        self.assertTrue(m.visitdir(b''))
         self.assertTrue(m.visitdir(b'dir'))
         self.assertFalse(m.visitdir(b'folder'))
         # OPT: these should probably be False.
@@ -115,7 +115,7 @@
     def testVisitchildrensetGlob(self):
         m = matchmod.match(b'x', b'', patterns=[b'glob:dir/z*'])
         assert isinstance(m, matchmod.patternmatcher)
-        self.assertEqual(m.visitchildrenset(b'.'), b'this')
+        self.assertEqual(m.visitchildrenset(b''), b'this')
         self.assertEqual(m.visitchildrenset(b'folder'), set())
         self.assertEqual(m.visitchildrenset(b'dir'), b'this')
         # OPT: these should probably be set().
@@ -127,7 +127,7 @@
     def testVisitdirPrefix(self):
         m = matchmod.match(b'x', b'', include=[b'path:dir/subdir'])
         assert isinstance(m, matchmod.includematcher)
-        self.assertTrue(m.visitdir(b'.'))
+        self.assertTrue(m.visitdir(b''))
         self.assertTrue(m.visitdir(b'dir'))
         self.assertEqual(m.visitdir(b'dir/subdir'), b'all')
         # OPT: This should probably be 'all' if its parent is?
@@ -137,7 +137,7 @@
     def testVisitchildrensetPrefix(self):
         m = matchmod.match(b'x', b'', include=[b'path:dir/subdir'])
         assert isinstance(m, matchmod.includematcher)
-        self.assertEqual(m.visitchildrenset(b'.'), {b'dir'})
+        self.assertEqual(m.visitchildrenset(b''), {b'dir'})
         self.assertEqual(m.visitchildrenset(b'dir'), {b'subdir'})
         self.assertEqual(m.visitchildrenset(b'dir/subdir'), b'all')
         # OPT: This should probably be 'all' if its parent is?
@@ -147,7 +147,7 @@
     def testVisitdirRootfilesin(self):
         m = matchmod.match(b'x', b'', include=[b'rootfilesin:dir/subdir'])
         assert isinstance(m, matchmod.includematcher)
-        self.assertTrue(m.visitdir(b'.'))
+        self.assertTrue(m.visitdir(b''))
         self.assertTrue(m.visitdir(b'dir'))
         self.assertTrue(m.visitdir(b'dir/subdir'))
         self.assertFalse(m.visitdir(b'dir/subdir/x'))
@@ -156,7 +156,7 @@
     def testVisitchildrensetRootfilesin(self):
         m = matchmod.match(b'x', b'', include=[b'rootfilesin:dir/subdir'])
         assert isinstance(m, matchmod.includematcher)
-        self.assertEqual(m.visitchildrenset(b'.'), {b'dir'})
+        self.assertEqual(m.visitchildrenset(b''), {b'dir'})
         self.assertEqual(m.visitchildrenset(b'dir'), {b'subdir'})
         self.assertEqual(m.visitchildrenset(b'dir/subdir'), b'this')
         self.assertEqual(m.visitchildrenset(b'dir/subdir/x'), set())
@@ -165,7 +165,7 @@
     def testVisitdirGlob(self):
         m = matchmod.match(b'x', b'', include=[b'glob:dir/z*'])
         assert isinstance(m, matchmod.includematcher)
-        self.assertTrue(m.visitdir(b'.'))
+        self.assertTrue(m.visitdir(b''))
         self.assertTrue(m.visitdir(b'dir'))
         self.assertFalse(m.visitdir(b'folder'))
         # OPT: these should probably be False.
@@ -175,7 +175,7 @@
     def testVisitchildrensetGlob(self):
         m = matchmod.match(b'x', b'', include=[b'glob:dir/z*'])
         assert isinstance(m, matchmod.includematcher)
-        self.assertEqual(m.visitchildrenset(b'.'), {b'dir'})
+        self.assertEqual(m.visitchildrenset(b''), {b'dir'})
         self.assertEqual(m.visitchildrenset(b'folder'), set())
         self.assertEqual(m.visitchildrenset(b'dir'), b'this')
         # OPT: these should probably be set().
@@ -187,7 +187,7 @@
     def testVisitdir(self):
         m = matchmod.exact(files=[b'dir/subdir/foo.txt'])
         assert isinstance(m, matchmod.exactmatcher)
-        self.assertTrue(m.visitdir(b'.'))
+        self.assertTrue(m.visitdir(b''))
         self.assertTrue(m.visitdir(b'dir'))
         self.assertTrue(m.visitdir(b'dir/subdir'))
         self.assertFalse(m.visitdir(b'dir/subdir/foo.txt'))
@@ -198,7 +198,7 @@
     def testVisitchildrenset(self):
         m = matchmod.exact(files=[b'dir/subdir/foo.txt'])
         assert isinstance(m, matchmod.exactmatcher)
-        self.assertEqual(m.visitchildrenset(b'.'), {b'dir'})
+        self.assertEqual(m.visitchildrenset(b''), {b'dir'})
         self.assertEqual(m.visitchildrenset(b'dir'), {b'subdir'})
         self.assertEqual(m.visitchildrenset(b'dir/subdir'), {b'foo.txt'})
         self.assertEqual(m.visitchildrenset(b'dir/subdir/x'), set())
@@ -212,7 +212,7 @@
                                   # no file in a/b/c
                                   b'a/b/c/d/file4.txt'])
         assert isinstance(m, matchmod.exactmatcher)
-        self.assertEqual(m.visitchildrenset(b'.'), {b'a', b'rootfile.txt'})
+        self.assertEqual(m.visitchildrenset(b''), {b'a', b'rootfile.txt'})
         self.assertEqual(m.visitchildrenset(b'a'), {b'b', b'file1.txt'})
         self.assertEqual(m.visitchildrenset(b'a/b'), {b'c', b'file2.txt'})
         self.assertEqual(m.visitchildrenset(b'a/b/c'), {b'd'})
@@ -227,7 +227,7 @@
         m2 = matchmod.alwaysmatcher()
         dm = matchmod.differencematcher(m1, m2)
         # dm should be equivalent to a nevermatcher.
-        self.assertFalse(dm.visitdir(b'.'))
+        self.assertFalse(dm.visitdir(b''))
         self.assertFalse(dm.visitdir(b'dir'))
         self.assertFalse(dm.visitdir(b'dir/subdir'))
         self.assertFalse(dm.visitdir(b'dir/subdir/z'))
@@ -240,7 +240,7 @@
         m2 = matchmod.alwaysmatcher()
         dm = matchmod.differencematcher(m1, m2)
         # dm should be equivalent to a nevermatcher.
-        self.assertEqual(dm.visitchildrenset(b'.'), set())
+        self.assertEqual(dm.visitchildrenset(b''), set())
         self.assertEqual(dm.visitchildrenset(b'dir'), set())
         self.assertEqual(dm.visitchildrenset(b'dir/subdir'), set())
         self.assertEqual(dm.visitchildrenset(b'dir/subdir/z'), set())
@@ -258,7 +258,7 @@
         # assertTrue does NOT verify that it's a bool, just that it's truthy.
         # While we may want to eventually make these return 'all', they should
         # not currently do so.
-        self.assertEqual(dm.visitdir(b'.'), b'all')
+        self.assertEqual(dm.visitdir(b''), b'all')
         self.assertEqual(dm.visitdir(b'dir'), b'all')
         self.assertEqual(dm.visitdir(b'dir/subdir'), b'all')
         self.assertEqual(dm.visitdir(b'dir/subdir/z'), b'all')
@@ -271,7 +271,7 @@
         m2 = matchmod.nevermatcher()
         dm = matchmod.differencematcher(m1, m2)
         # dm should be equivalent to a alwaysmatcher.
-        self.assertEqual(dm.visitchildrenset(b'.'), b'all')
+        self.assertEqual(dm.visitchildrenset(b''), b'all')
         self.assertEqual(dm.visitchildrenset(b'dir'), b'all')
         self.assertEqual(dm.visitchildrenset(b'dir/subdir'), b'all')
         self.assertEqual(dm.visitchildrenset(b'dir/subdir/z'), b'all')
@@ -283,7 +283,7 @@
         m1 = matchmod.alwaysmatcher()
         m2 = matchmod.match(b'', b'', patterns=[b'path:dir/subdir'])
         dm = matchmod.differencematcher(m1, m2)
-        self.assertEqual(dm.visitdir(b'.'), True)
+        self.assertEqual(dm.visitdir(b''), True)
         self.assertEqual(dm.visitdir(b'dir'), True)
         self.assertFalse(dm.visitdir(b'dir/subdir'))
         # OPT: We should probably return False for these; we don't because
@@ -298,7 +298,7 @@
         m1 = matchmod.alwaysmatcher()
         m2 = matchmod.match(b'', b'', patterns=[b'path:dir/subdir'])
         dm = matchmod.differencematcher(m1, m2)
-        self.assertEqual(dm.visitchildrenset(b'.'), b'this')
+        self.assertEqual(dm.visitchildrenset(b''), b'this')
         self.assertEqual(dm.visitchildrenset(b'dir'), b'this')
         self.assertEqual(dm.visitchildrenset(b'dir/subdir'), set())
         self.assertEqual(dm.visitchildrenset(b'dir/foo'), b'all')
@@ -315,7 +315,7 @@
         m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
         m2 = matchmod.match(b'', b'', include=[b'rootfilesin:dir'])
         dm = matchmod.differencematcher(m1, m2)
-        self.assertEqual(dm.visitdir(b'.'), True)
+        self.assertEqual(dm.visitdir(b''), True)
         self.assertEqual(dm.visitdir(b'dir'), True)
         self.assertEqual(dm.visitdir(b'dir/subdir'), b'all')
         self.assertFalse(dm.visitdir(b'dir/foo'))
@@ -330,7 +330,7 @@
         m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
         m2 = matchmod.match(b'', b'', include=[b'rootfilesin:dir'])
         dm = matchmod.differencematcher(m1, m2)
-        self.assertEqual(dm.visitchildrenset(b'.'), {b'dir'})
+        self.assertEqual(dm.visitchildrenset(b''), {b'dir'})
         self.assertEqual(dm.visitchildrenset(b'dir'), {b'subdir'})
         self.assertEqual(dm.visitchildrenset(b'dir/subdir'), b'all')
         self.assertEqual(dm.visitchildrenset(b'dir/foo'), set())
@@ -348,7 +348,7 @@
         m2 = matchmod.alwaysmatcher()
         im = matchmod.intersectmatchers(m1, m2)
         # im should be equivalent to a alwaysmatcher.
-        self.assertEqual(im.visitdir(b'.'), b'all')
+        self.assertEqual(im.visitdir(b''), b'all')
         self.assertEqual(im.visitdir(b'dir'), b'all')
         self.assertEqual(im.visitdir(b'dir/subdir'), b'all')
         self.assertEqual(im.visitdir(b'dir/subdir/z'), b'all')
@@ -361,7 +361,7 @@
         m2 = matchmod.alwaysmatcher()
         im = matchmod.intersectmatchers(m1, m2)
         # im should be equivalent to a alwaysmatcher.
-        self.assertEqual(im.visitchildrenset(b'.'), b'all')
+        self.assertEqual(im.visitchildrenset(b''), b'all')
         self.assertEqual(im.visitchildrenset(b'dir'), b'all')
         self.assertEqual(im.visitchildrenset(b'dir/subdir'), b'all')
         self.assertEqual(im.visitchildrenset(b'dir/subdir/z'), b'all')
@@ -374,7 +374,7 @@
         m2 = matchmod.nevermatcher()
         im = matchmod.intersectmatchers(m1, m2)
         # im should be equivalent to a nevermatcher.
-        self.assertFalse(im.visitdir(b'.'))
+        self.assertFalse(im.visitdir(b''))
         self.assertFalse(im.visitdir(b'dir'))
         self.assertFalse(im.visitdir(b'dir/subdir'))
         self.assertFalse(im.visitdir(b'dir/subdir/z'))
@@ -387,7 +387,7 @@
         m2 = matchmod.nevermatcher()
         im = matchmod.intersectmatchers(m1, m2)
         # im should be equivalent to a nevermqtcher.
-        self.assertEqual(im.visitchildrenset(b'.'), set())
+        self.assertEqual(im.visitchildrenset(b''), set())
         self.assertEqual(im.visitchildrenset(b'dir'), set())
         self.assertEqual(im.visitchildrenset(b'dir/subdir'), set())
         self.assertEqual(im.visitchildrenset(b'dir/subdir/z'), set())
@@ -399,7 +399,7 @@
         m1 = matchmod.alwaysmatcher()
         m2 = matchmod.match(b'', b'', patterns=[b'path:dir/subdir'])
         im = matchmod.intersectmatchers(m1, m2)
-        self.assertEqual(im.visitdir(b'.'), True)
+        self.assertEqual(im.visitdir(b''), True)
         self.assertEqual(im.visitdir(b'dir'), True)
         self.assertEqual(im.visitdir(b'dir/subdir'), b'all')
         self.assertFalse(im.visitdir(b'dir/foo'))
@@ -414,7 +414,7 @@
         m1 = matchmod.alwaysmatcher()
         m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
         im = matchmod.intersectmatchers(m1, m2)
-        self.assertEqual(im.visitchildrenset(b'.'), {b'dir'})
+        self.assertEqual(im.visitchildrenset(b''), {b'dir'})
         self.assertEqual(im.visitchildrenset(b'dir'), {b'subdir'})
         self.assertEqual(im.visitchildrenset(b'dir/subdir'), b'all')
         self.assertEqual(im.visitchildrenset(b'dir/foo'), set())
@@ -429,7 +429,7 @@
         m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
         m2 = matchmod.match(b'', b'', include=[b'rootfilesin:dir'])
         im = matchmod.intersectmatchers(m1, m2)
-        self.assertEqual(im.visitdir(b'.'), True)
+        self.assertEqual(im.visitdir(b''), True)
         self.assertEqual(im.visitdir(b'dir'), True)
         self.assertFalse(im.visitdir(b'dir/subdir'))
         self.assertFalse(im.visitdir(b'dir/foo'))
@@ -441,7 +441,7 @@
         m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
         m2 = matchmod.match(b'', b'', include=[b'rootfilesin:dir'])
         im = matchmod.intersectmatchers(m1, m2)
-        self.assertEqual(im.visitchildrenset(b'.'), {b'dir'})
+        self.assertEqual(im.visitchildrenset(b''), {b'dir'})
         self.assertEqual(im.visitchildrenset(b'dir'), b'this')
         self.assertEqual(im.visitchildrenset(b'dir/subdir'), set())
         self.assertEqual(im.visitchildrenset(b'dir/foo'), set())
@@ -456,7 +456,7 @@
         m2 = matchmod.match(b'', b'', include=[b'path:folder'])
         im = matchmod.intersectmatchers(m1, m2)
         # FIXME: is True correct here?
-        self.assertEqual(im.visitdir(b'.'), True)
+        self.assertEqual(im.visitdir(b''), True)
         self.assertFalse(im.visitdir(b'dir'))
         self.assertFalse(im.visitdir(b'dir/subdir'))
         self.assertFalse(im.visitdir(b'dir/foo'))
@@ -469,7 +469,7 @@
         m2 = matchmod.match(b'', b'', include=[b'path:folder'])
         im = matchmod.intersectmatchers(m1, m2)
         # FIXME: is set() correct here?
-        self.assertEqual(im.visitchildrenset(b'.'), set())
+        self.assertEqual(im.visitchildrenset(b''), set())
         self.assertEqual(im.visitchildrenset(b'dir'), set())
         self.assertEqual(im.visitchildrenset(b'dir/subdir'), set())
         self.assertEqual(im.visitchildrenset(b'dir/foo'), set())
@@ -483,7 +483,7 @@
         m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir/x'])
         m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
         im = matchmod.intersectmatchers(m1, m2)
-        self.assertEqual(im.visitdir(b'.'), True)
+        self.assertEqual(im.visitdir(b''), True)
         self.assertEqual(im.visitdir(b'dir'), True)
         self.assertEqual(im.visitdir(b'dir/subdir'), True)
         self.assertFalse(im.visitdir(b'dir/foo'))
@@ -496,7 +496,7 @@
         m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir/x'])
         m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
         im = matchmod.intersectmatchers(m1, m2)
-        self.assertEqual(im.visitchildrenset(b'.'), {b'dir'})
+        self.assertEqual(im.visitchildrenset(b''), {b'dir'})
         self.assertEqual(im.visitchildrenset(b'dir'), {b'subdir'})
         self.assertEqual(im.visitchildrenset(b'dir/subdir'), {b'x'})
         self.assertEqual(im.visitchildrenset(b'dir/foo'), set())
@@ -512,7 +512,7 @@
         m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir/z'])
         im = matchmod.intersectmatchers(m1, m2)
         # OPT: these next three could probably be False as well.
-        self.assertEqual(im.visitdir(b'.'), True)
+        self.assertEqual(im.visitdir(b''), True)
         self.assertEqual(im.visitdir(b'dir'), True)
         self.assertEqual(im.visitdir(b'dir/subdir'), True)
         self.assertFalse(im.visitdir(b'dir/foo'))
@@ -525,7 +525,7 @@
         m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir/z'])
         im = matchmod.intersectmatchers(m1, m2)
         # OPT: these next two could probably be set() as well.
-        self.assertEqual(im.visitchildrenset(b'.'), {b'dir'})
+        self.assertEqual(im.visitchildrenset(b''), {b'dir'})
         self.assertEqual(im.visitchildrenset(b'dir'), {b'subdir'})
         self.assertEqual(im.visitchildrenset(b'dir/subdir'), set())
         self.assertEqual(im.visitchildrenset(b'dir/foo'), set())
@@ -540,7 +540,7 @@
         m2 = matchmod.alwaysmatcher()
         um = matchmod.unionmatcher([m1, m2])
         # um should be equivalent to a alwaysmatcher.
-        self.assertEqual(um.visitdir(b'.'), b'all')
+        self.assertEqual(um.visitdir(b''), b'all')
         self.assertEqual(um.visitdir(b'dir'), b'all')
         self.assertEqual(um.visitdir(b'dir/subdir'), b'all')
         self.assertEqual(um.visitdir(b'dir/subdir/z'), b'all')
@@ -553,7 +553,7 @@
         m2 = matchmod.alwaysmatcher()
         um = matchmod.unionmatcher([m1, m2])
         # um should be equivalent to a alwaysmatcher.
-        self.assertEqual(um.visitchildrenset(b'.'), b'all')
+        self.assertEqual(um.visitchildrenset(b''), b'all')
         self.assertEqual(um.visitchildrenset(b'dir'), b'all')
         self.assertEqual(um.visitchildrenset(b'dir/subdir'), b'all')
         self.assertEqual(um.visitchildrenset(b'dir/subdir/z'), b'all')
@@ -566,7 +566,7 @@
         m2 = matchmod.alwaysmatcher()
         um = matchmod.unionmatcher([m1, m2])
         # um should be equivalent to a alwaysmatcher.
-        self.assertEqual(um.visitdir(b'.'), b'all')
+        self.assertEqual(um.visitdir(b''), b'all')
         self.assertEqual(um.visitdir(b'dir'), b'all')
         self.assertEqual(um.visitdir(b'dir/subdir'), b'all')
         self.assertEqual(um.visitdir(b'dir/subdir/z'), b'all')
@@ -579,7 +579,7 @@
         m2 = matchmod.alwaysmatcher()
         um = matchmod.unionmatcher([m1, m2])
         # um should be equivalent to a alwaysmatcher.
-        self.assertEqual(um.visitchildrenset(b'.'), b'all')
+        self.assertEqual(um.visitchildrenset(b''), b'all')
         self.assertEqual(um.visitchildrenset(b'dir'), b'all')
         self.assertEqual(um.visitchildrenset(b'dir/subdir'), b'all')
         self.assertEqual(um.visitchildrenset(b'dir/subdir/z'), b'all')
@@ -592,7 +592,7 @@
         m2 = matchmod.nevermatcher()
         um = matchmod.unionmatcher([m1, m2])
         # um should be equivalent to a alwaysmatcher.
-        self.assertEqual(um.visitdir(b'.'), b'all')
+        self.assertEqual(um.visitdir(b''), b'all')
         self.assertEqual(um.visitdir(b'dir'), b'all')
         self.assertEqual(um.visitdir(b'dir/subdir'), b'all')
         self.assertEqual(um.visitdir(b'dir/subdir/z'), b'all')
@@ -605,7 +605,7 @@
         m2 = matchmod.nevermatcher()
         um = matchmod.unionmatcher([m1, m2])
         # um should be equivalent to a alwaysmatcher.
-        self.assertEqual(um.visitchildrenset(b'.'), b'all')
+        self.assertEqual(um.visitchildrenset(b''), b'all')
         self.assertEqual(um.visitchildrenset(b'dir'), b'all')
         self.assertEqual(um.visitchildrenset(b'dir/subdir'), b'all')
         self.assertEqual(um.visitchildrenset(b'dir/subdir/z'), b'all')
@@ -617,7 +617,7 @@
         m1 = matchmod.alwaysmatcher()
         m2 = matchmod.match(b'', b'', patterns=[b'path:dir/subdir'])
         um = matchmod.unionmatcher([m1, m2])
-        self.assertEqual(um.visitdir(b'.'), b'all')
+        self.assertEqual(um.visitdir(b''), b'all')
         self.assertEqual(um.visitdir(b'dir'), b'all')
         self.assertEqual(um.visitdir(b'dir/subdir'), b'all')
         self.assertEqual(um.visitdir(b'dir/foo'), b'all')
@@ -629,7 +629,7 @@
         m1 = matchmod.alwaysmatcher()
         m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
         um = matchmod.unionmatcher([m1, m2])
-        self.assertEqual(um.visitchildrenset(b'.'), b'all')
+        self.assertEqual(um.visitchildrenset(b''), b'all')
         self.assertEqual(um.visitchildrenset(b'dir'), b'all')
         self.assertEqual(um.visitchildrenset(b'dir/subdir'), b'all')
         self.assertEqual(um.visitchildrenset(b'dir/foo'), b'all')
@@ -643,7 +643,7 @@
         m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
         m2 = matchmod.match(b'', b'', include=[b'rootfilesin:dir'])
         um = matchmod.unionmatcher([m1, m2])
-        self.assertEqual(um.visitdir(b'.'), True)
+        self.assertEqual(um.visitdir(b''), True)
         self.assertEqual(um.visitdir(b'dir'), True)
         self.assertEqual(um.visitdir(b'dir/subdir'), b'all')
         self.assertFalse(um.visitdir(b'dir/foo'))
@@ -656,7 +656,7 @@
         m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
         m2 = matchmod.match(b'', b'', include=[b'rootfilesin:dir'])
         um = matchmod.unionmatcher([m1, m2])
-        self.assertEqual(um.visitchildrenset(b'.'), {b'dir'})
+        self.assertEqual(um.visitchildrenset(b''), {b'dir'})
         self.assertEqual(um.visitchildrenset(b'dir'), b'this')
         self.assertEqual(um.visitchildrenset(b'dir/subdir'), b'all')
         self.assertEqual(um.visitchildrenset(b'dir/foo'), set())
@@ -671,7 +671,7 @@
         m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
         m2 = matchmod.match(b'', b'', include=[b'path:folder'])
         um = matchmod.unionmatcher([m1, m2])
-        self.assertEqual(um.visitdir(b'.'), True)
+        self.assertEqual(um.visitdir(b''), True)
         self.assertEqual(um.visitdir(b'dir'), True)
         self.assertEqual(um.visitdir(b'dir/subdir'), b'all')
         self.assertFalse(um.visitdir(b'dir/foo'))
@@ -684,7 +684,7 @@
         m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
         m2 = matchmod.match(b'', b'', include=[b'path:folder'])
         um = matchmod.unionmatcher([m1, m2])
-        self.assertEqual(um.visitchildrenset(b'.'), {b'folder', b'dir'})
+        self.assertEqual(um.visitchildrenset(b''), {b'folder', b'dir'})
         self.assertEqual(um.visitchildrenset(b'dir'), {b'subdir'})
         self.assertEqual(um.visitchildrenset(b'dir/subdir'), b'all')
         self.assertEqual(um.visitchildrenset(b'dir/foo'), set())
@@ -699,7 +699,7 @@
         m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir/x'])
         m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
         um = matchmod.unionmatcher([m1, m2])
-        self.assertEqual(um.visitdir(b'.'), True)
+        self.assertEqual(um.visitdir(b''), True)
         self.assertEqual(um.visitdir(b'dir'), True)
         self.assertEqual(um.visitdir(b'dir/subdir'), b'all')
         self.assertFalse(um.visitdir(b'dir/foo'))
@@ -712,7 +712,7 @@
         m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir/x'])
         m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
         um = matchmod.unionmatcher([m1, m2])
-        self.assertEqual(um.visitchildrenset(b'.'), {b'dir'})
+        self.assertEqual(um.visitchildrenset(b''), {b'dir'})
         self.assertEqual(um.visitchildrenset(b'dir'), {b'subdir'})
         self.assertEqual(um.visitchildrenset(b'dir/subdir'), b'all')
         self.assertEqual(um.visitchildrenset(b'dir/foo'), set())
@@ -728,7 +728,7 @@
         m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir/z'])
         um = matchmod.unionmatcher([m1, m2])
         # OPT: these next three could probably be False as well.
-        self.assertEqual(um.visitdir(b'.'), True)
+        self.assertEqual(um.visitdir(b''), True)
         self.assertEqual(um.visitdir(b'dir'), True)
         self.assertEqual(um.visitdir(b'dir/subdir'), True)
         self.assertFalse(um.visitdir(b'dir/foo'))
@@ -740,7 +740,7 @@
         m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir/x'])
         m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir/z'])
         um = matchmod.unionmatcher([m1, m2])
-        self.assertEqual(um.visitchildrenset(b'.'), {b'dir'})
+        self.assertEqual(um.visitchildrenset(b''), {b'dir'})
         self.assertEqual(um.visitchildrenset(b'dir'), {b'subdir'})
         self.assertEqual(um.visitchildrenset(b'dir/subdir'), {b'x', b'z'})
         self.assertEqual(um.visitchildrenset(b'dir/foo'), set())
@@ -754,7 +754,7 @@
         m = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
         sm = matchmod.subdirmatcher(b'dir', m)
 
-        self.assertEqual(sm.visitdir(b'.'), True)
+        self.assertEqual(sm.visitdir(b''), True)
         self.assertEqual(sm.visitdir(b'subdir'), b'all')
         # OPT: These next two should probably be 'all' not True.
         self.assertEqual(sm.visitdir(b'subdir/x'), True)
@@ -765,7 +765,7 @@
         m = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
         sm = matchmod.subdirmatcher(b'dir', m)
 
-        self.assertEqual(sm.visitchildrenset(b'.'), {b'subdir'})
+        self.assertEqual(sm.visitchildrenset(b''), {b'subdir'})
         self.assertEqual(sm.visitchildrenset(b'subdir'), b'all')
         # OPT: These next two should probably be 'all' not 'this'.
         self.assertEqual(sm.visitchildrenset(b'subdir/x'), b'this')
@@ -795,12 +795,12 @@
         self.assertEqual(bool(pm(b'd/e/b.txt')), False)
         self.assertEqual(bool(pm(b'd/e/f/b.txt')), True)
 
-        self.assertEqual(m.visitdir(b'.'), True)
+        self.assertEqual(m.visitdir(b''), True)
         self.assertEqual(m.visitdir(b'e'), True)
         self.assertEqual(m.visitdir(b'e/f'), True)
         self.assertEqual(m.visitdir(b'e/f/g'), False)
 
-        self.assertEqual(pm.visitdir(b'.'), True)
+        self.assertEqual(pm.visitdir(b''), True)
         self.assertEqual(pm.visitdir(b'd'), True)
         self.assertEqual(pm.visitdir(b'd/e'), True)
         self.assertEqual(pm.visitdir(b'd/e/f'), True)
@@ -814,7 +814,7 @@
         # OPT: visitchildrenset could possibly return {'e'} and {'f'} for these
         # next two, respectively; patternmatcher does not have this
         # optimization.
-        self.assertEqual(m.visitchildrenset(b'.'), b'this')
+        self.assertEqual(m.visitchildrenset(b''), b'this')
         self.assertEqual(m.visitchildrenset(b'e'), b'this')
         self.assertEqual(m.visitchildrenset(b'e/f'), b'this')
         self.assertEqual(m.visitchildrenset(b'e/f/g'), set())
@@ -822,7 +822,7 @@
         # OPT: visitchildrenset could possibly return {'d'}, {'e'}, and {'f'}
         # for these next three, respectively; patternmatcher does not have this
         # optimization.
-        self.assertEqual(pm.visitchildrenset(b'.'), b'this')
+        self.assertEqual(pm.visitchildrenset(b''), b'this')
         self.assertEqual(pm.visitchildrenset(b'd'), b'this')
         self.assertEqual(pm.visitchildrenset(b'd/e'), b'this')
         self.assertEqual(pm.visitchildrenset(b'd/e/f'), b'this')
--- a/tests/test-merge-commit.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-merge-commit.t	Mon May 20 11:40:47 2019 -0400
@@ -67,7 +67,6 @@
 This should use bar@rev2 as the ancestor:
 
   $ hg --debug merge 3
-    searching for copies back to rev 1
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 0f2ff26688b9, local: 2263c1be0967+, remote: 0555950ead28
@@ -155,7 +154,6 @@
 This should use bar@rev2 as the ancestor:
 
   $ hg --debug merge 3
-    searching for copies back to rev 1
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 0f2ff26688b9, local: 2263c1be0967+, remote: 3ffa6b9e35f0
--- a/tests/test-merge-criss-cross.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-merge-criss-cross.t	Mon May 20 11:40:47 2019 -0400
@@ -11,15 +11,9 @@
 
   $ hg up -qr0
   $ echo '2 first change' > f2
-  $ mkdir d1
-  $ echo '0 base' > d1/f3
-  $ echo '0 base' > d1/f4
-  $ hg add -q d1
   $ hg ci -qm '2 first change f2'
 
   $ hg merge -qr 1
-  $ hg rm d1/f3
-  $ hg mv -q d1 d2
   $ hg ci -m '3 merge'
 
   $ hg up -qr2
@@ -30,38 +24,38 @@
   $ hg ci -m '5 second change f1'
 
   $ hg up -r3
-  2 files updated, 0 files merged, 2 files removed, 0 files unresolved
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ echo '6 second change' > f2
   $ hg ci -m '6 second change f2'
 
   $ hg log -G
-  @  changeset:   6:6373bbfdae1d
+  @  changeset:   6:3b08d01b0ab5
   |  tag:         tip
-  |  parent:      3:c202c8af058d
+  |  parent:      3:cf89f02107e5
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     6 second change f2
   |
-  | o  changeset:   5:e673248094b1
+  | o  changeset:   5:adfe50279922
   | |  user:        test
   | |  date:        Thu Jan 01 00:00:00 1970 +0000
   | |  summary:     5 second change f1
   | |
-  | o    changeset:   4:177f58377c06
-  | |\   parent:      2:d1d156401c1b
+  | o    changeset:   4:7d3e55501ae6
+  | |\   parent:      2:40663881a6dd
   | | |  parent:      1:0f6b37dbe527
   | | |  user:        test
   | | |  date:        Thu Jan 01 00:00:00 1970 +0000
   | | |  summary:     4 merge
   | | |
-  o---+  changeset:   3:c202c8af058d
-  | | |  parent:      2:d1d156401c1b
+  o---+  changeset:   3:cf89f02107e5
+  | | |  parent:      2:40663881a6dd
   |/ /   parent:      1:0f6b37dbe527
   | |    user:        test
   | |    date:        Thu Jan 01 00:00:00 1970 +0000
   | |    summary:     3 merge
   | |
-  | o  changeset:   2:d1d156401c1b
+  | o  changeset:   2:40663881a6dd
   | |  parent:      0:40494bf2444c
   | |  user:        test
   | |  date:        Thu Jan 01 00:00:00 1970 +0000
@@ -79,51 +73,26 @@
   
 
   $ hg merge -v --debug --tool internal:dump 5 --config merge.preferancestor='!'
-  note: using 0f6b37dbe527 as ancestor of 6373bbfdae1d and e673248094b1
-        alternatively, use --config merge.preferancestor=d1d156401c1b
-    searching for copies back to rev 3
-    unmatched files in local:
-     d2/f4
-    unmatched files in other:
-     d1/f3
-     d1/f4
-    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
-     src: 'd1/f4' -> dst: 'd2/f4' 
-    checking for directory renames
-     discovered dir src: 'd1/' -> dst: 'd2/'
-     pending file src: 'd1/f3' -> dst: 'd2/f3'
-     pending file src: 'd1/f4' -> dst: 'd2/f4'
+  note: using 0f6b37dbe527 as ancestor of 3b08d01b0ab5 and adfe50279922
+        alternatively, use --config merge.preferancestor=40663881a6dd
   resolving manifests
    branchmerge: True, force: False, partial: False
-   ancestor: 0f6b37dbe527, local: 6373bbfdae1d+, remote: e673248094b1
-   preserving d2/f4 for resolve of d2/f4
+   ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922
    preserving f2 for resolve of f2
    f1: remote is newer -> g
   getting f1
-   d2/f3: local directory rename - get from d1/f3 -> dg
-  getting d1/f3 to d2/f3
-   d2/f4: local directory rename, both created -> m (premerge)
    f2: versions differ -> m (premerge)
   picked tool ':dump' for f2 (binary False symlink False changedelete False)
   merging f2
-  my f2@6373bbfdae1d+ other f2@e673248094b1 ancestor f2@0f6b37dbe527
+  my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@0f6b37dbe527
    f2: versions differ -> m (merge)
   picked tool ':dump' for f2 (binary False symlink False changedelete False)
-  my f2@6373bbfdae1d+ other f2@e673248094b1 ancestor f2@0f6b37dbe527
-  3 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@0f6b37dbe527
+  1 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
   [1]
 
-  $ f --dump --recurse *
-  d2: directory with 2 files
-  d2/f3:
-  >>>
-  0 base
-  <<<
-  d2/f4:
-  >>>
-  0 base
-  <<<
+  $ f --dump *
   f1:
   >>>
   5 second change
@@ -151,13 +120,11 @@
 
   $ hg up -qC .
   $ hg merge -v --tool internal:dump 5 --config merge.preferancestor="null 40663881 3b08d"
-  note: using 0f6b37dbe527 as ancestor of 6373bbfdae1d and e673248094b1
-        alternatively, use --config merge.preferancestor=d1d156401c1b
+  note: using 40663881a6dd as ancestor of 3b08d01b0ab5 and adfe50279922
+        alternatively, use --config merge.preferancestor=0f6b37dbe527
   resolving manifests
-  getting f1
-  getting d1/f3 to d2/f3
-  merging f2
-  3 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  merging f1
+  0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
   [1]
 
@@ -166,70 +133,34 @@
   $ rm f*
   $ hg up -qC .
   $ hg merge -v --debug --tool internal:dump 5 --config merge.preferancestor="*"
-  note: merging 6373bbfdae1d+ and e673248094b1 using bids from ancestors 0f6b37dbe527 and d1d156401c1b
+  note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
   
   calculating bids for ancestor 0f6b37dbe527
-    searching for copies back to rev 3
-    unmatched files in local:
-     d2/f4
-    unmatched files in other:
-     d1/f3
-     d1/f4
-    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
-     src: 'd1/f4' -> dst: 'd2/f4' 
-    checking for directory renames
-     discovered dir src: 'd1/' -> dst: 'd2/'
-     pending file src: 'd1/f3' -> dst: 'd2/f3'
-     pending file src: 'd1/f4' -> dst: 'd2/f4'
   resolving manifests
    branchmerge: True, force: False, partial: False
-   ancestor: 0f6b37dbe527, local: 6373bbfdae1d+, remote: e673248094b1
-   d2/f3: local directory rename - get from d1/f3 -> dg
-   d2/f4: local directory rename, both created -> m
+   ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922
    f1: remote is newer -> g
    f2: versions differ -> m
   
-  calculating bids for ancestor d1d156401c1b
-    searching for copies back to rev 3
-    unmatched files in local:
-     d2/f4
-    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
-     src: 'd1/f4' -> dst: 'd2/f4' 
-    checking for directory renames
-     discovered dir src: 'd1/' -> dst: 'd2/'
+  calculating bids for ancestor 40663881a6dd
   resolving manifests
    branchmerge: True, force: False, partial: False
-   ancestor: d1d156401c1b, local: 6373bbfdae1d+, remote: e673248094b1
+   ancestor: 40663881a6dd, local: 3b08d01b0ab5+, remote: adfe50279922
    f1: versions differ -> m
    f2: remote unchanged -> k
   
   auction for merging merge bids
-   d2/f3: consensus for dg
-   d2/f4: consensus for m
    f1: picking 'get' action
    f2: picking 'keep' action
   end of auction
   
-   preserving d2/f4 for resolve of d2/f4
    f1: remote is newer -> g
   getting f1
    f2: remote unchanged -> k
-   d2/f3: local directory rename - get from d1/f3 -> dg
-  getting d1/f3 to d2/f3
-   d2/f4: local directory rename, both created -> m (premerge)
-  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
 
-  $ f --dump --recurse *
-  d2: directory with 2 files
-  d2/f3:
-  >>>
-  0 base
-  <<<
-  d2/f4:
-  >>>
-  0 base
-  <<<
+  $ f --dump *
   f1:
   >>>
   5 second change
@@ -243,79 +174,36 @@
 The other way around:
 
   $ hg up -C -r5
-  4 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg merge -v --debug --config merge.preferancestor="*"
-  note: merging e673248094b1+ and 6373bbfdae1d using bids from ancestors 0f6b37dbe527 and d1d156401c1b
+  note: merging adfe50279922+ and 3b08d01b0ab5 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
   
   calculating bids for ancestor 0f6b37dbe527
-    searching for copies back to rev 3
-    unmatched files in local:
-     d1/f3
-     d1/f4
-    unmatched files in other:
-     d2/f4
-    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
-     src: 'd1/f4' -> dst: 'd2/f4' 
-    checking for directory renames
-     discovered dir src: 'd1/' -> dst: 'd2/'
-     pending file src: 'd1/f3' -> dst: 'd2/f3'
-     pending file src: 'd1/f4' -> dst: 'd2/f4'
   resolving manifests
    branchmerge: True, force: False, partial: False
-   ancestor: 0f6b37dbe527, local: e673248094b1+, remote: 6373bbfdae1d
-   d2/f3: remote directory rename - move from d1/f3 -> dm
-   d2/f4: remote directory rename, both created -> m
+   ancestor: 0f6b37dbe527, local: adfe50279922+, remote: 3b08d01b0ab5
    f1: remote unchanged -> k
    f2: versions differ -> m
   
-  calculating bids for ancestor d1d156401c1b
-    searching for copies back to rev 3
-    unmatched files in other:
-     d2/f4
-    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
-     src: 'd1/f4' -> dst: 'd2/f4' 
-    checking for directory renames
-     discovered dir src: 'd1/' -> dst: 'd2/'
+  calculating bids for ancestor 40663881a6dd
   resolving manifests
    branchmerge: True, force: False, partial: False
-   ancestor: d1d156401c1b, local: e673248094b1+, remote: 6373bbfdae1d
-   d1/f3: other deleted -> r
-   d1/f4: other deleted -> r
-   d2/f4: remote created -> g
+   ancestor: 40663881a6dd, local: adfe50279922+, remote: 3b08d01b0ab5
    f1: versions differ -> m
    f2: remote is newer -> g
   
   auction for merging merge bids
-   d1/f3: consensus for r
-   d1/f4: consensus for r
-   d2/f3: consensus for dm
-   d2/f4: picking 'get' action
    f1: picking 'keep' action
    f2: picking 'get' action
   end of auction
   
-   d1/f3: other deleted -> r
-  removing d1/f3
-   d1/f4: other deleted -> r
-  removing d1/f4
-   d2/f4: remote created -> g
-  getting d2/f4
    f2: remote is newer -> g
   getting f2
    f1: remote unchanged -> k
-  2 files updated, 0 files merged, 2 files removed, 0 files unresolved
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
 
-  $ f --dump --recurse *
-  d2: directory with 2 files
-  d2/f3:
-  >>>
-  0 base
-  <<<
-  d2/f4:
-  >>>
-  0 base
-  <<<
+  $ f --dump *
   f1:
   >>>
   5 second change
@@ -329,85 +217,55 @@
 
   $ hg up -qC
   $ hg merge
-  2 files updated, 0 files merged, 2 files removed, 0 files unresolved
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
 
   $ hg up -qC tip
   $ hg merge -v
-  note: merging 6373bbfdae1d+ and e673248094b1 using bids from ancestors 0f6b37dbe527 and d1d156401c1b
+  note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
   
   calculating bids for ancestor 0f6b37dbe527
   resolving manifests
   
-  calculating bids for ancestor d1d156401c1b
+  calculating bids for ancestor 40663881a6dd
   resolving manifests
   
   auction for merging merge bids
-   d2/f3: consensus for dg
-   d2/f4: consensus for m
    f1: picking 'get' action
    f2: picking 'keep' action
   end of auction
   
   getting f1
-  getting d1/f3 to d2/f3
-  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
 
   $ hg up -qC
   $ hg merge -v --debug --config merge.preferancestor="*"
-  note: merging 6373bbfdae1d+ and e673248094b1 using bids from ancestors 0f6b37dbe527 and d1d156401c1b
+  note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
   
   calculating bids for ancestor 0f6b37dbe527
-    searching for copies back to rev 3
-    unmatched files in local:
-     d2/f4
-    unmatched files in other:
-     d1/f3
-     d1/f4
-    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
-     src: 'd1/f4' -> dst: 'd2/f4' 
-    checking for directory renames
-     discovered dir src: 'd1/' -> dst: 'd2/'
-     pending file src: 'd1/f3' -> dst: 'd2/f3'
-     pending file src: 'd1/f4' -> dst: 'd2/f4'
   resolving manifests
    branchmerge: True, force: False, partial: False
-   ancestor: 0f6b37dbe527, local: 6373bbfdae1d+, remote: e673248094b1
-   d2/f3: local directory rename - get from d1/f3 -> dg
-   d2/f4: local directory rename, both created -> m
+   ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922
    f1: remote is newer -> g
    f2: versions differ -> m
   
-  calculating bids for ancestor d1d156401c1b
-    searching for copies back to rev 3
-    unmatched files in local:
-     d2/f4
-    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
-     src: 'd1/f4' -> dst: 'd2/f4' 
-    checking for directory renames
-     discovered dir src: 'd1/' -> dst: 'd2/'
+  calculating bids for ancestor 40663881a6dd
   resolving manifests
    branchmerge: True, force: False, partial: False
-   ancestor: d1d156401c1b, local: 6373bbfdae1d+, remote: e673248094b1
+   ancestor: 40663881a6dd, local: 3b08d01b0ab5+, remote: adfe50279922
    f1: versions differ -> m
    f2: remote unchanged -> k
   
   auction for merging merge bids
-   d2/f3: consensus for dg
-   d2/f4: consensus for m
    f1: picking 'get' action
    f2: picking 'keep' action
   end of auction
   
-   preserving d2/f4 for resolve of d2/f4
    f1: remote is newer -> g
   getting f1
    f2: remote unchanged -> k
-   d2/f3: local directory rename - get from d1/f3 -> dg
-  getting d1/f3 to d2/f3
-   d2/f4: local directory rename, both created -> m (premerge)
-  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
 
 Test the greatest common ancestor returning multiple changesets
@@ -418,7 +276,7 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     1 first change f1
   
-  changeset:   2:d1d156401c1b
+  changeset:   2:40663881a6dd
   parent:      0:40494bf2444c
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
@@ -513,3 +371,80 @@
   2
 
   $ cd ..
+
+  $ hg init issue5020
+  $ cd issue5020
+
+  $ echo a > noop
+  $ hg ci -qAm initial
+
+  $ echo b > noop
+  $ hg ci -qAm 'uninteresting change'
+
+  $ hg up -q 0
+  $ mkdir d1
+  $ echo a > d1/a
+  $ echo b > d1/b
+  $ hg ci -qAm 'add d1/a and d1/b'
+
+  $ hg merge -q 1
+  $ hg rm d1/a
+  $ hg mv -q d1 d2
+  $ hg ci -qm 'merge while removing d1/a and moving d1/b to d2/b'
+
+  $ hg up -q 1
+  $ hg merge -q 2
+  $ hg ci -qm 'merge (no changes while merging)'
+  $ hg log -G -T '{rev}:{node|short} {desc}'
+  @    4:c0ef19750a22 merge (no changes while merging)
+  |\
+  +---o  3:6ca01f7342b9 merge while removing d1/a and moving d1/b to d2/b
+  | |/
+  | o  2:154e6000f54e add d1/a and d1/b
+  | |
+  o |  1:11b5b303e36c uninteresting change
+  |/
+  o  0:7b54db1ebf33 initial
+  
+  $ hg merge 3 --debug
+  note: merging c0ef19750a22+ and 6ca01f7342b9 using bids from ancestors 11b5b303e36c and 154e6000f54e
+  
+  calculating bids for ancestor 11b5b303e36c
+    unmatched files in local:
+     d1/a
+     d1/b
+    unmatched files in other:
+     d2/b
+  resolving manifests
+   branchmerge: True, force: False, partial: False
+   ancestor: 11b5b303e36c, local: c0ef19750a22+, remote: 6ca01f7342b9
+   d2/b: remote created -> g
+  
+  calculating bids for ancestor 154e6000f54e
+    unmatched files in other:
+     d2/b
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+     src: 'd1/b' -> dst: 'd2/b' 
+    checking for directory renames
+     discovered dir src: 'd1/' -> dst: 'd2/'
+  resolving manifests
+   branchmerge: True, force: False, partial: False
+   ancestor: 154e6000f54e, local: c0ef19750a22+, remote: 6ca01f7342b9
+   d1/a: other deleted -> r
+   d1/b: other deleted -> r
+   d2/b: remote created -> g
+  
+  auction for merging merge bids
+   d1/a: consensus for r
+   d1/b: consensus for r
+   d2/b: consensus for g
+  end of auction
+  
+   d1/a: other deleted -> r
+  removing d1/a
+   d1/b: other deleted -> r
+  removing d1/b
+   d2/b: remote created -> g
+  getting d2/b
+  1 files updated, 0 files merged, 2 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
--- a/tests/test-merge-no-file-change.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-merge-no-file-change.t	Mon May 20 11:40:47 2019 -0400
@@ -137,7 +137,7 @@
   $ hg ci --debug -m merge
   committing files:
   b
-  reusing manifest form p1 (listed files actually unchanged)
+  reusing manifest from p1 (listed files actually unchanged)
   committing changelog
   updating the branch cache
   committed changeset 3:c8d50407916ef8a5a97cb6e36ca9bc844a6ee13e
--- a/tests/test-merge-types.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-merge-types.t	Mon May 20 11:40:47 2019 -0400
@@ -30,7 +30,6 @@
 Symlink is local parent, executable is other:
 
   $ hg merge --debug
-    searching for copies back to rev 1
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: c334dc3be0da, local: 521a1e40188f+, remote: 3574f3e69b1c
@@ -63,7 +62,6 @@
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
   $ hg merge --debug --tool :union
-    searching for copies back to rev 1
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: c334dc3be0da, local: 3574f3e69b1c+, remote: 521a1e40188f
@@ -86,7 +84,6 @@
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
   $ hg merge --debug --tool :merge3
-    searching for copies back to rev 1
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: c334dc3be0da, local: 3574f3e69b1c+, remote: 521a1e40188f
@@ -109,7 +106,6 @@
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
   $ hg merge --debug --tool :merge-local
-    searching for copies back to rev 1
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: c334dc3be0da, local: 3574f3e69b1c+, remote: 521a1e40188f
@@ -131,7 +127,6 @@
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
   $ hg merge --debug --tool :merge-other
-    searching for copies back to rev 1
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: c334dc3be0da, local: 3574f3e69b1c+, remote: 521a1e40188f
@@ -165,7 +160,6 @@
   $ hg up -Cq 0
   $ echo data > a
   $ HGMERGE= hg up -y --debug --config ui.merge=
-    searching for copies back to rev 2
   resolving manifests
    branchmerge: False, force: False, partial: False
    ancestor: c334dc3be0da, local: c334dc3be0da+, remote: 521a1e40188f
--- a/tests/test-merge7.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-merge7.t	Mon May 20 11:40:47 2019 -0400
@@ -81,7 +81,6 @@
   new changesets 40d11a4173a8
   (run 'hg heads' to see heads, 'hg merge' to merge)
   $ hg merge --debug
-    searching for copies back to rev 1
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 96b70246a118, local: 50c3a7e29886+, remote: 40d11a4173a8
--- a/tests/test-mv-cp-st-diff.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-mv-cp-st-diff.t	Mon May 20 11:40:47 2019 -0400
@@ -1624,6 +1624,13 @@
   $ echo change > f
   $ hg ci -m 'change f'
 
+Make a rename because we want to track renames. It is also important that the
+faulty linkrev is not only the "start" commit to ensure the linkrev will be
+used.
+
+  $ hg mv f renamed
+  $ hg ci -m renamed
+
 Make a second branch, we use a named branch to create a simple commit
 that does not touch f.
 
@@ -1631,31 +1638,28 @@
   $ hg branch -q dev
   $ hg ci -Aqm dev
 
-Graft the initial change, as f was untouched, we reuse the same entry and the
-linkrev point to the older branch.
+Graft the initial change and the rename. As f was untouched, we reuse the same
+entry and the linkrev points to the older branch.
 
   $ hg graft -q 'desc(change)'
-
-Make a rename because we want to track renames. It is also important that the
-faulty linkrev is not the "start" commit to ensure the linkrev will be used.
-
-  $ hg mv f renamed
-  $ hg ci -m renamed
+  $ hg graft -q 'desc(renamed)'
 
   $ hg log -G -T '{rev} {desc}'
-  @  4 renamed
+  @  5 renamed
+  |
+  o  4 change f
   |
-  o  3 change f
+  o  3 dev
   |
-  o  2 dev
-  |
+  | o  2 renamed
+  | |
   | o  1 change f
   |/
   o  0 empty f
   
 
-The copy tracking should still reach rev 2 (branch creation).
-accessing the parent of 4 (renamed) should not jump use to revision 1.
+The copy tracking should still reach rev 3 (branch creation).
+Accessing the parent of 5 (renamed) should not jump us to revision 1.
 
   $ hg diff --git -r 'desc(dev)' -r .
   diff --git a/f b/renamed
@@ -1669,11 +1673,11 @@
 Check debug output for copy tracing
 
   $ hg status --copies --rev 'desc(dev)' --rev . --config devel.debug.copies=yes --debug
-  debug.copies: searching copies from a51f36ab1704 to 7935fd48a8f9
+  debug.copies: searching copies from a51f36ab1704 to 1f4aa1fd627b
   debug.copies: search mode: forward
-  debug.copies:    looking into rename from a51f36ab1704 to 7935fd48a8f9
-  debug.copies:      search limit: 2
-  debug.copies:      missing file to search: 1
+  debug.copies:    looking into rename from a51f36ab1704 to 1f4aa1fd627b
+  debug.copies:      search limit: 3
+  debug.copies:      missing files to search: 1
   debug.copies:        tracing file: renamed
   debug.copies:          rename of: f
   debug.copies:          time: * seconds (glob)
@@ -1681,4 +1685,11 @@
     f
   R f
 
+Check that merging across the rename works
+
+  $ echo modified >> renamed
+  $ hg co -m 4
+  merging renamed and f to f
+  0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+
   $ cd ..
--- a/tests/test-narrow-share.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-narrow-share.t	Mon May 20 11:40:47 2019 -0400
@@ -28,6 +28,9 @@
   $ hg clone --narrow ssh://user@dummy/remote main -q \
   > --include d1 --include d3 --include d5 --include d7
 
+Ignore any file called "ignored"
+  $ echo ignored > main/.hgignore
+
   $ hg share main share
   updating working directory
   4 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -55,15 +58,19 @@
 # Make d3/f dirty
   $ echo x >> main/d3/f
   $ echo y >> main/d3/g
+  $ touch main/d3/ignored
+  $ touch main/d3/untracked
   $ hg add main/d3/g
   $ hg -R main st
   M d3/f
   A d3/g
+  ? d3/untracked
 # Make d5/f not match the dirstate timestamp even though it's clean
   $ sleep 2
   $ hg -R main st
   M d3/f
   A d3/g
+  ? d3/untracked
   $ hg -R main debugdirstate --no-dates
   n 644          2 set                 d1/f
   n 644          2 set                 d3/f
@@ -91,6 +98,8 @@
   not deleting possibly dirty file d3/f
   not deleting possibly dirty file d3/g
   not deleting possibly dirty file d5/f
+  not deleting unknown file d3/untracked
+  not deleting ignored file d3/ignored
 # d1/f, d3/f, d3/g and d5/f should no longer be reported
   $ hg -R main files
   main/d7/f
@@ -99,6 +108,8 @@
   $ find main/d* -type f | sort
   main/d3/f
   main/d3/g
+  main/d3/ignored
+  main/d3/untracked
   main/d5/f
   main/d7/f
 
@@ -131,6 +142,8 @@
   $ hg -R main st --all
   M d3/f
   ? d3/g
+  ? d3/untracked
+  I d3/ignored
   C d1/f
   C d7/f
 
--- a/tests/test-narrow-update.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-narrow-update.t	Mon May 20 11:40:47 2019 -0400
@@ -72,5 +72,5 @@
 
   $ hg mv inside/f1 inside/f2
   $ hg update -q 'desc("modify outside")'
-  $ hg update -q 'desc("initial")'
+  $ hg update -q 'desc("add inside and outside")'
   $ hg update -q 'desc("modify inside")'
--- a/tests/test-origbackup-conflict.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-origbackup-conflict.t	Mon May 20 11:40:47 2019 -0400
@@ -129,8 +129,9 @@
   b/c: replacing untracked file
   getting b/c
   creating directory: $TESTTMP/repo/.hg/badorigbackups/b
-  abort: $ENOTDIR$: *$TESTTMP/repo/.hg/badorigbackups/b* (glob)
-  [255]
-  $ cat .hg/badorigbackups
-  data
-
+  removing conflicting file: $TESTTMP/repo/.hg/badorigbackups
+  getting d
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (activating bookmark c1)
+  $ ls .hg/badorigbackups/b
+  c
--- a/tests/test-phabricator.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-phabricator.t	Mon May 20 11:40:47 2019 -0400
@@ -32,6 +32,8 @@
 Basic phabread:
   $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
   # HG changeset patch
+  # Date 1536771503 0
+  # Parent  a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
   exchangev2: start to implement pull with wire protocol v2
   
   Wire protocol version 2 will take a substantially different
@@ -39,8 +41,6 @@
   is concerned).
   
   This commit establishes a new exchangev2 module for holding
-  code related to exchange using wire protocol v2. I could have
-  added things to the existing exchange module. But it is already
 
 phabupdate with an accept:
   $ hg phabupdate --accept D4564 \
@@ -53,18 +53,18 @@
   $ hg ci --addremove -m 'create alpha for phabricator test €'
   adding alpha
   $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
-  D6054 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
+  D1190 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
   $ echo more >> alpha
   $ HGEDITOR=true hg ci --amend
-  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/cb03845d6dd9-870f61a6-amend.hg
+  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/a86ed7d85e86-b7a54f3b-amend.hg
   $ echo beta > beta
   $ hg ci --addremove -m 'create beta for phabricator test'
   adding beta
   $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
-  D6054 - updated - 939d862f0318: create alpha for phabricator test \xe2\x82\xac (esc)
-  D6055 - created - f55f947ed0f8: create beta for phabricator test
-  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f55f947ed0f8-0d1e502e-phabsend.hg
+  D1190 - updated - d940d39fb603: create alpha for phabricator test \xe2\x82\xac (esc)
+  D1191 - created - 4b2486dfc8c7: create beta for phabricator test
+  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/4b2486dfc8c7-d90584fa-phabsend.hg
   $ unset HGENCODING
 
 The amend won't explode after posting a public commit.  The local tag is left
@@ -76,13 +76,13 @@
   $ echo 'draft change' > alpha
   $ hg ci -m 'create draft change for phabricator testing'
   $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
-  D5544 - created - a56e5ebd77e6: create public change for phabricator testing
-  D5545 - created - 6a0ade3e3ec2: create draft change for phabricator testing
-  warning: not updating public commit 2:a56e5ebd77e6
-  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/6a0ade3e3ec2-aca7d23c-phabsend.hg
+  D1192 - created - 24ffd6bca53a: create public change for phabricator testing
+  D1193 - created - ac331633be79: create draft change for phabricator testing
+  warning: not updating public commit 2:24ffd6bca53a
+  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/ac331633be79-719b961c-phabsend.hg
   $ hg tags -v
-  tip                                3:90532860b5e1
-  D5544                              2:a56e5ebd77e6 local
+  tip                                3:a19f1434f9a5
+  D1192                              2:24ffd6bca53a local
 
   $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
   > {
@@ -107,15 +107,15 @@
 
 Template keywords
   $ hg log -T'{rev} {phabreview|json}\n'
-  3 {"id": "D5545", "url": "https://phab.mercurial-scm.org/D5545"}
-  2 {"id": "D5544", "url": "https://phab.mercurial-scm.org/D5544"}
-  1 {"id": "D6055", "url": "https://phab.mercurial-scm.org/D6055"}
-  0 {"id": "D6054", "url": "https://phab.mercurial-scm.org/D6054"}
+  3 {"id": "D1193", "url": "https://phab.mercurial-scm.org/D1193"}
+  2 {"id": "D1192", "url": "https://phab.mercurial-scm.org/D1192"}
+  1 {"id": "D1191", "url": "https://phab.mercurial-scm.org/D1191"}
+  0 {"id": "D1190", "url": "https://phab.mercurial-scm.org/D1190"}
 
   $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
-  3 https://phab.mercurial-scm.org/D5545 D5545
-  2 https://phab.mercurial-scm.org/D5544 D5544
-  1 https://phab.mercurial-scm.org/D6055 D6055
-  0 https://phab.mercurial-scm.org/D6054 D6054
+  3 https://phab.mercurial-scm.org/D1193 D1193
+  2 https://phab.mercurial-scm.org/D1192 D1192
+  1 https://phab.mercurial-scm.org/D1191 D1191
+  0 https://phab.mercurial-scm.org/D1190 D1190
 
   $ cd ..
--- a/tests/test-push-warn.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-push-warn.t	Mon May 20 11:40:47 2019 -0400
@@ -791,3 +791,57 @@
   [255]
 
   $ cd ..
+
+Test pushing of closed branches (issue6080)
+
+  $ hg init x
+  $ cd x
+  $ hg -q branch a
+  $ echo 0 > foo
+  $ hg -q ci -Am 0
+  $ hg -q up 0
+  $ cd ..
+
+  $ hg -q clone x z
+  $ cd z
+
+When there is a single closed branch
+
+  $ hg -q branch foo
+  $ echo 0 > foo
+  $ hg -q ci -Am 0
+  $ hg ci --close-branch -m 'closing branch foo'
+  $ hg -q up 0
+  $ hg push ../x
+  pushing to ../x
+  searching for changes
+  abort: push creates new remote branches: foo (1 closed)!
+  (use 'hg push --new-branch' to create new remote branches)
+  [255]
+
+When there is more than one closed branch
+  $ hg -q branch bar
+  $ echo 0 > bar
+  $ hg -q ci -Am 0
+  $ hg ci --close-branch -m 'closing branch bar'
+  $ hg -q up 0
+  $ hg push ../x
+  pushing to ../x
+  searching for changes
+  abort: push creates new remote branches: bar, foo (2 closed)!
+  (use 'hg push --new-branch' to create new remote branches)
+  [255]
+
+When there is more than one new branch and not all of them are closed
+  $ hg -q branch bar1
+  $ echo 0 > bar1
+  $ hg -q ci -Am 0
+  $ hg -q up 0
+  $ hg push ../x
+  pushing to ../x
+  searching for changes
+  abort: push creates new remote branches: bar, bar1, foo (2 closed)!
+  (use 'hg push --new-branch' to create new remote branches)
+  [255]
+
+  $ cd ..
--- a/tests/test-rebase-conflicts.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-rebase-conflicts.t	Mon May 20 11:40:47 2019 -0400
@@ -248,9 +248,6 @@
   getting f1.txt
    merge against 9:e31216eec445
      detach base 8:8e4e2c1a07ae
-    searching for copies back to rev 3
-    unmatched files in other (from topological common ancestor):
-     f2.txt
   resolving manifests
    branchmerge: True, force: True, partial: False
    ancestor: 8e4e2c1a07ae, local: 4bc80088dc6b+, remote: e31216eec445
@@ -268,9 +265,6 @@
    already in destination
    merge against 10:2f2496ddf49d
      detach base 9:e31216eec445
-    searching for copies back to rev 3
-    unmatched files in other (from topological common ancestor):
-     f2.txt
   resolving manifests
    branchmerge: True, force: True, partial: False
    ancestor: e31216eec445, local: 19c888675e13+, remote: 2f2496ddf49d
--- a/tests/test-rebase-inmemory.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-rebase-inmemory.t	Mon May 20 11:40:47 2019 -0400
@@ -763,6 +763,44 @@
 
   $ cd ..
 
+Test rebasing a commit with copy information
+
+  $ hg init rebase-rename
+  $ cd rebase-rename
+  $ echo a > a
+  $ hg ci -Aqm 'add a'
+  $ echo a2 > a
+  $ hg ci -m 'modify a'
+  $ hg co -q 0
+  $ hg mv a b
+  $ hg ci -qm 'rename a to b'
+  $ hg rebase -d 1
+  rebasing 2:b977edf6f839 "rename a to b" (tip)
+  merging a and b to b
+  saved backup bundle to $TESTTMP/rebase-rename/.hg/strip-backup/b977edf6f839-0864f570-rebase.hg
+  $ hg st --copies --change .
+  A b
+    a
+  R a
+  $ cd ..
+
+Rebase across a copy with --collapse
+
+  $ hg init rebase-rename-collapse
+  $ cd rebase-rename-collapse
+  $ echo a > a
+  $ hg ci -Aqm 'add a'
+  $ hg mv a b
+  $ hg ci -m 'rename a to b'
+  $ hg co -q 0
+  $ echo a2 > a
+  $ hg ci -qm 'modify a'
+  $ hg rebase -r . -d 1 --collapse
+  rebasing 2:41c4ea50d4cf "modify a" (tip)
+  merging b and a to b
+  saved backup bundle to $TESTTMP/rebase-rename-collapse/.hg/strip-backup/41c4ea50d4cf-b90b7994-rebase.hg
+  $ cd ..
+
 Test rebasing when the file we are merging in destination is empty
 
   $ hg init test
--- a/tests/test-remotefilelog-sparse.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-remotefilelog-sparse.t	Mon May 20 11:40:47 2019 -0400
@@ -98,12 +98,5 @@
   $ clearcache
   $ hg prefetch -r '. + .^' -I x -I z
   4 files fetched over 1 fetches - (4 misses, 0.00% hit ratio) over * (glob)
-Originally this was testing that the rebase doesn't fetch pointless
-blobs. Right now it fails because core's sparse can't load a spec from
-the working directory. Presumably there's a fix, but I'm not sure what it is.
   $ hg rebase -d 2 --keep
   rebasing 1:876b1317060d "x2" (foo)
-  transaction abort!
-  rollback completed
-  abort: cannot parse sparse patterns from working directory
-  [255]
--- a/tests/test-rename-dir-merge.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-rename-dir-merge.t	Mon May 20 11:40:47 2019 -0400
@@ -24,7 +24,6 @@
   created new head
 
   $ hg merge --debug 1
-    searching for copies back to rev 1
     unmatched files in local:
      a/c
     unmatched files in other:
@@ -70,7 +69,6 @@
   $ hg co -C 1
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ hg merge --debug 2
-    searching for copies back to rev 1
     unmatched files in local:
      b/a
      b/b
--- a/tests/test-rename-merge1.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-rename-merge1.t	Mon May 20 11:40:47 2019 -0400
@@ -22,7 +22,6 @@
   created new head
 
   $ hg merge -y --debug
-    searching for copies back to rev 1
     unmatched files in local:
      c2
     unmatched files in other:
@@ -76,7 +75,7 @@
   $ hg cp b b3
   $ hg cp b b4
   $ hg ci -A -m 'copy b twice'
-  $ hg up eb92d88a9712
+  $ hg up '.^'
   0 files updated, 0 files merged, 2 files removed, 0 files unresolved
   $ hg up
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -88,7 +87,7 @@
   $ hg cp b b3
   $ hg mv b b4
   $ hg ci -A -m 'divergent renames in same changeset'
-  $ hg up c761c6948de0
+  $ hg up '.^'
   1 files updated, 0 files merged, 2 files removed, 0 files unresolved
   $ hg up
   2 files updated, 0 files merged, 1 files removed, 0 files unresolved
@@ -168,7 +167,6 @@
   $ hg commit -m "deleted file"
   created new head
   $ hg merge --debug
-    searching for copies back to rev 1
     unmatched files in other:
      newfile
     all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -186,3 +184,52 @@
   $ hg status
   M newfile
   $ cd ..
+
+Check that a file is considered unrelated when deleted and recreated
+
+  $ hg init unrelated
+  $ cd unrelated
+  $ echo foo > file
+  $ hg add file
+  $ hg commit -m "added file"
+  $ hg cp file newfile
+  $ hg commit -m "copy file"
+  $ hg update 0
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ hg rm file
+  $ hg commit -m "deleted file"
+  created new head
+  $ echo bar > file
+  $ hg add file
+  $ hg ci -m 'recreate file'
+  $ hg log -G -T '{rev} {desc}\n'
+  @  3 recreate file
+  |
+  o  2 deleted file
+  |
+  | o  1 copy file
+  |/
+  o  0 added file
+  
+BROKEN: this is inconsistent with `hg merge` (below), which doesn't consider
+'file' renamed, since it was deleted for a while
+  $ hg st --copies --rev 3 --rev 1
+  M file
+  A newfile
+    file
+  $ hg merge --debug 1
+    unmatched files in other:
+     newfile
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+     src: 'file' -> dst: 'newfile' 
+    checking for directory renames
+  resolving manifests
+   branchmerge: True, force: False, partial: False
+   ancestor: 19d7f95df299, local: 4e4a42b1cbdf+, remote: 45b14aae7432
+   newfile: remote created -> g
+  getting newfile
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+  $ hg status
+  M newfile
+  $ cd ..
--- a/tests/test-rename-merge2.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-rename-merge2.t	Mon May 20 11:40:47 2019 -0400
@@ -68,7 +68,6 @@
   >         hg add $2 2> /dev/null
   >     fi
   > }
-  $ uc() { up $1; hg cp $1 $2; } # update + copy
   $ um() { up $1; hg mv $1 $2; }
   $ nc() { hg cp $1 $2; } # just copy
   $ nm() { hg mv $1 $2; } # just move
@@ -77,7 +76,6 @@
   --------------
   test L:up a   R:nc a b W:       - 1  get local a to b
   --------------
-    searching for copies back to rev 1
     unmatched files in other:
      b
     all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -116,7 +114,6 @@
   --------------
   test L:nc a b R:up a   W:       - 2  get rem change to a and b
   --------------
-    searching for copies back to rev 1
     unmatched files in local:
      b
     all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -156,7 +153,6 @@
   --------------
   test L:up a   R:nm a b W:       - 3  get local a change to b, remove a
   --------------
-    searching for copies back to rev 1
     unmatched files in other:
      b
     all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -195,7 +191,6 @@
   --------------
   test L:nm a b R:up a   W:       - 4  get remote change to b
   --------------
-    searching for copies back to rev 1
     unmatched files in local:
      b
     all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -233,7 +228,6 @@
   --------------
   test L:       R:nc a b W:       - 5  get b
   --------------
-    searching for copies back to rev 1
     unmatched files in other:
      b
     all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -266,7 +260,6 @@
   --------------
   test L:nc a b R:       W:       - 6  nothing
   --------------
-    searching for copies back to rev 1
     unmatched files in local:
      b
     all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -298,7 +291,6 @@
   --------------
   test L:       R:nm a b W:       - 7  get b
   --------------
-    searching for copies back to rev 1
     unmatched files in other:
      b
     all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -332,7 +324,6 @@
   --------------
   test L:nm a b R:       W:       - 8  nothing
   --------------
-    searching for copies back to rev 1
     unmatched files in local:
      b
     all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -363,9 +354,9 @@
   --------------
   test L:um a b R:um a b W:       - 9  do merge with ancestor in a
   --------------
-    searching for copies back to rev 1
-    unmatched files new in both:
-     b
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+     src: 'a' -> dst: 'b' *
+    checking for directory renames
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 62e7bf090eba+, remote: 49b6d8032493
@@ -404,7 +395,6 @@
   --------------
   test L:nm a b R:nm a c W:       - 11 get c, keep b
   --------------
-    searching for copies back to rev 1
     unmatched files in local:
      b
     unmatched files in other:
@@ -443,9 +433,9 @@
   --------------
   test L:nc a b R:up b   W:       - 12 merge b no ancestor
   --------------
-    searching for copies back to rev 1
-    unmatched files new in both:
-     b
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+     src: 'a' -> dst: 'b' 
+    checking for directory renames
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 86a2aa42fc76+, remote: af30c7647fc7
@@ -482,9 +472,9 @@
   --------------
   test L:up b   R:nm a b W:       - 13 merge b no ancestor
   --------------
-    searching for copies back to rev 1
-    unmatched files new in both:
-     b
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+     src: 'a' -> dst: 'b' 
+    checking for directory renames
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 59318016310c+, remote: bdb19105162a
@@ -522,9 +512,9 @@
   --------------
   test L:nc a b R:up a b W:       - 14 merge b no ancestor
   --------------
-    searching for copies back to rev 1
-    unmatched files new in both:
-     b
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+     src: 'a' -> dst: 'b' 
+    checking for directory renames
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 86a2aa42fc76+, remote: 8dbce441892a
@@ -562,9 +552,9 @@
   --------------
   test L:up b   R:nm a b W:       - 15 merge b no ancestor, remove a
   --------------
-    searching for copies back to rev 1
-    unmatched files new in both:
-     b
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+     src: 'a' -> dst: 'b' 
+    checking for directory renames
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 59318016310c+, remote: bdb19105162a
@@ -602,9 +592,9 @@
   --------------
   test L:nc a b R:up a b W:       - 16 get a, merge b no ancestor
   --------------
-    searching for copies back to rev 1
-    unmatched files new in both:
-     b
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+     src: 'a' -> dst: 'b' 
+    checking for directory renames
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 86a2aa42fc76+, remote: 8dbce441892a
@@ -642,9 +632,9 @@
   --------------
   test L:up a b R:nc a b W:       - 17 keep a, merge b no ancestor
   --------------
-    searching for copies back to rev 1
-    unmatched files new in both:
-     b
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+     src: 'a' -> dst: 'b' 
+    checking for directory renames
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 0b76e65c8289+, remote: 4ce40f5aca24
@@ -681,9 +671,9 @@
   --------------
   test L:nm a b R:up a b W:       - 18 merge b no ancestor
   --------------
-    searching for copies back to rev 1
-    unmatched files new in both:
-     b
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+     src: 'a' -> dst: 'b' 
+    checking for directory renames
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 02963e448370+, remote: 8dbce441892a
@@ -726,9 +716,9 @@
   --------------
   test L:up a b R:nm a b W:       - 19 merge b no ancestor, prompt remove a
   --------------
-    searching for copies back to rev 1
-    unmatched files new in both:
-     b
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+     src: 'a' -> dst: 'b' 
+    checking for directory renames
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 0b76e65c8289+, remote: bdb19105162a
@@ -772,7 +762,6 @@
   --------------
   test L:up a   R:um a b W:       - 20 merge a and b to b, remove a
   --------------
-    searching for copies back to rev 1
     unmatched files in other:
      b
     all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -815,7 +804,6 @@
   --------------
   test L:um a b R:up a   W:       - 21 merge a and b to b
   --------------
-    searching for copies back to rev 1
     unmatched files in local:
      b
     all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -860,7 +848,6 @@
   --------------
   test L:nm a b R:up a c W:       - 23 get c, keep b
   --------------
-    searching for copies back to rev 1
     unmatched files in local:
      b
     unmatched files in other:
@@ -941,7 +928,6 @@
   $ echo m > 7/f
   $ echo m > 8/f
   $ hg merge -f --tool internal:dump -v --debug -r2 | sed '/^resolving manifests/,$d' 2> /dev/null
-    searching for copies back to rev 1
     unmatched files in local:
      5/g
      6/g
@@ -949,10 +935,8 @@
      3/g
      4/g
      7/f
-    unmatched files new in both:
-     0/f
-     1/g
     all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+     src: '1/f' -> dst: '1/g' *
      src: '3/f' -> dst: '3/g' *
      src: '4/f' -> dst: '4/g' *
      src: '5/f' -> dst: '5/g' *
--- a/tests/test-revset.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-revset.t	Mon May 20 11:40:47 2019 -0400
@@ -1962,25 +1962,26 @@
   2147483647
 
 Test working-directory integer revision and node id
-(BUG: '0:wdir()' is still needed to populate wdir revision)
 
-  $ hg debugrevspec '0:wdir() & 2147483647'
+  $ hg debugrevspec '2147483647'
   2147483647
-  $ hg debugrevspec '0:wdir() & rev(2147483647)'
+  $ hg debugrevspec 'rev(2147483647)'
+  2147483647
+  $ hg debugrevspec 'ffffffffffffffffffffffffffffffffffffffff'
   2147483647
-  $ hg debugrevspec '0:wdir() & ffffffffffffffffffffffffffffffffffffffff'
+  $ hg debugrevspec 'ffffffffffff'
   2147483647
-  $ hg debugrevspec '0:wdir() & ffffffffffff'
+  $ hg debugrevspec 'id(ffffffffffffffffffffffffffffffffffffffff)'
   2147483647
-  $ hg debugrevspec '0:wdir() & id(ffffffffffffffffffffffffffffffffffffffff)'
+  $ hg debugrevspec 'id(ffffffffffff)'
   2147483647
-  $ hg debugrevspec '0:wdir() & id(ffffffffffff)'
+  $ hg debugrevspec 'ffffffffffff+000000000000'
   2147483647
+  -1
 
   $ cd ..
 
 Test short 'ff...' hash collision
-(BUG: '0:wdir()' is still needed to populate wdir revision)
 
   $ hg init wdir-hashcollision
   $ cd wdir-hashcollision
@@ -2006,21 +2007,21 @@
   $ hg debugobsolete fffbae3886c8fbb2114296380d276fd37715d571
   obsoleted 1 changesets
 
-  $ hg debugrevspec '0:wdir() & fff'
+  $ hg debugrevspec 'fff'
   abort: 00changelog.i@fff: ambiguous identifier!
   [255]
-  $ hg debugrevspec '0:wdir() & ffff'
+  $ hg debugrevspec 'ffff'
   abort: 00changelog.i@ffff: ambiguous identifier!
   [255]
-  $ hg debugrevspec '0:wdir() & fffb'
+  $ hg debugrevspec 'fffb'
   abort: 00changelog.i@fffb: ambiguous identifier!
   [255]
 BROKEN should be '2' (node lookup uses unfiltered repo)
-  $ hg debugrevspec '0:wdir() & id(fffb)'
+  $ hg debugrevspec 'id(fffb)'
 BROKEN should be '2' (node lookup uses unfiltered repo)
-  $ hg debugrevspec '0:wdir() & ffff8'
+  $ hg debugrevspec 'ffff8'
   4
-  $ hg debugrevspec '0:wdir() & fffff'
+  $ hg debugrevspec 'fffff'
   2147483647
 
   $ cd ..
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rust-discovery.py	Mon May 20 11:40:47 2019 -0400
@@ -0,0 +1,111 @@
+from __future__ import absolute_import
+import unittest
+
+try:
+    from mercurial import rustext
+    rustext.__name__  # trigger immediate actual import
+except ImportError:
+    rustext = None
+else:
+    # this would fail already without appropriate ancestor.__package__
+    from mercurial.rustext.discovery import (
+        PartialDiscovery,
+    )
+
+try:
+    from mercurial.cext import parsers as cparsers
+except ImportError:
+    cparsers = None
+
+# picked from test-parse-index2, copied rather than imported
+# so that it stays stable even if test-parse-index2 changes or disappears.
+data_non_inlined = (
+    b'\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01D\x19'
+    b'\x00\x07e\x12\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff'
+    b'\xff\xff\xff\xff\xd1\xf4\xbb\xb0\xbe\xfc\x13\xbd\x8c\xd3\x9d'
+    b'\x0f\xcd\xd9;\x8c\x07\x8cJ/\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+    b'\x00\x00\x00\x00\x00\x00\x01D\x19\x00\x00\x00\x00\x00\xdf\x00'
+    b'\x00\x01q\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\xff'
+    b'\xff\xff\xff\xc1\x12\xb9\x04\x96\xa4Z1t\x91\xdfsJ\x90\xf0\x9bh'
+    b'\x07l&\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+    b'\x00\x01D\xf8\x00\x00\x00\x00\x01\x1b\x00\x00\x01\xb8\x00\x00'
+    b'\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\x02\n'
+    b'\x0e\xc6&\xa1\x92\xae6\x0b\x02i\xfe-\xe5\xbao\x05\xd1\xe7\x00'
+    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01F'
+    b'\x13\x00\x00\x00\x00\x01\xec\x00\x00\x03\x06\x00\x00\x00\x01'
+    b'\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x12\xcb\xeby1'
+    b'\xb6\r\x98B\xcb\x07\xbd`\x8f\x92\xd9\xc4\x84\xbdK\x00\x00\x00'
+    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+    )
+
+
+@unittest.skipIf(rustext is None or cparsers is None,
+                 "rustext or the C Extension parsers module "
+                 "discovery relies on is not available")
+class rustdiscoverytest(unittest.TestCase):
+    """Test the correctness of binding to Rust code.
+
+    This test is merely for the binding to Rust itself: extraction of
+    Python variables, giving back the results, etc.
+
+    It is not meant to test the algorithmic correctness of the provided
+    methods. Hence the very simple embedded index data is good enough.
+
+    Algorithmic correctness is asserted by the Rust unit tests.
+    """
+
+    def parseindex(self):
+        return cparsers.parse_index2(data_non_inlined, False)[0]
+
+    def testindex(self):
+        idx = self.parseindex()
+        # checking our assumptions about the index binary data:
+        self.assertEqual({i: (r[5], r[6]) for i, r in enumerate(idx)},
+                         {0: (-1, -1),
+                          1: (0, -1),
+                          2: (1, -1),
+                          3: (2, -1)})
+
+    def testaddcommonsmissings(self):
+        idx = self.parseindex()
+        disco = PartialDiscovery(idx, [3])
+        self.assertFalse(disco.hasinfo())
+        self.assertFalse(disco.iscomplete())
+
+        disco.addcommons([1])
+        self.assertTrue(disco.hasinfo())
+        self.assertFalse(disco.iscomplete())
+
+        disco.addmissings([2])
+        self.assertTrue(disco.hasinfo())
+        self.assertTrue(disco.iscomplete())
+
+        self.assertEqual(disco.commonheads(), {1})
+
+    def testaddmissingsstats(self):
+        idx = self.parseindex()
+        disco = PartialDiscovery(idx, [3])
+        self.assertIsNone(disco.stats()['undecided'])
+
+        disco.addmissings([2])
+        self.assertEqual(disco.stats()['undecided'], 2)
+
+    def testaddinfocommonfirst(self):
+        idx = self.parseindex()
+        disco = PartialDiscovery(idx, [3])
+        disco.addinfo([(1, True), (2, False)])
+        self.assertTrue(disco.hasinfo())
+        self.assertTrue(disco.iscomplete())
+        self.assertEqual(disco.commonheads(), {1})
+
+    def testaddinfomissingfirst(self):
+        idx = self.parseindex()
+        disco = PartialDiscovery(idx, [3])
+        disco.addinfo([(2, False), (1, True)])
+        self.assertTrue(disco.hasinfo())
+        self.assertTrue(disco.iscomplete())
+        self.assertEqual(disco.commonheads(), {1})
+
+if __name__ == '__main__':
+    import silenttestrunner
+    silenttestrunner.main(__name__)
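The new test above drives the Rust PartialDiscovery binding through its whole lifecycle. As a quick orientation, the sketch below restates that flow using only the calls exercised by the test (PartialDiscovery, addcommons, addmissings, iscomplete, commonheads); it assumes the Rust extensions are built and reuses the data_non_inlined index bytes defined in the test, so it is an illustrative recap rather than additional coverage.

    # Minimal sketch of the PartialDiscovery flow exercised above.
    # Assumes the Rust extension is built and that data_non_inlined is
    # the index byte string from tests/test-rust-discovery.py.
    from mercurial.cext import parsers as cparsers
    from mercurial.rustext.discovery import PartialDiscovery

    # Parse the toy, non-inlined revlog index (four linear revisions).
    index = cparsers.parse_index2(data_non_inlined, False)[0]

    # Start discovery with revision 3 as the local target head.
    disco = PartialDiscovery(index, [3])

    # Report what the peer told us: rev 1 is common, rev 2 is missing.
    disco.addcommons([1])
    disco.addmissings([2])

    assert disco.iscomplete()
    assert disco.commonheads() == {1}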
--- a/tests/test-server-view.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-server-view.t	Mon May 20 11:40:47 2019 -0400
@@ -34,5 +34,30 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     r0
   
+
+Check same result using `experimental.extra-filter-revs`
+
+  $ hg -R test --config experimental.extra-filter-revs='not public()' serve -p $HGPORT1 -d --pid-file=hg2.pid -E errors.log
+  $ cat hg2.pid >> $DAEMON_PIDS
+  $ hg -R test2 incoming http://foo:xyzzy@localhost:$HGPORT1/
+  comparing with http://foo:***@localhost:$HGPORT1/
+  changeset:   0:1ea73414a91b
+  tag:         tip
+  user:        debugbuilddag
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     r0
+  
+  $ hg -R test --config experimental.extra-filter-revs='not public()' debugupdatecache
+  $ ls -1 test/.hg/cache/
+  branch2-base%89c45d2fa07e
+  branch2-served
+  hgtagsfnodes1
+  rbc-names-v1
+  rbc-revs-v1
+  tags2
+  tags2-served%89c45d2fa07e
+
+cleanup
+
   $ cat errors.log
   $ killdaemons.py
--- a/tests/test-setdiscovery.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-setdiscovery.t	Mon May 20 11:40:47 2019 -0400
@@ -926,7 +926,7 @@
   common heads: 7ead0cba2838
 
 
-One with >200 heads, which used to use up all of the sample:
+One with >200 heads. We now switch to sending them all in the initial roundtrip, but still do sampling for later requests.
 
   $ hg init manyheads
   $ cd manyheads
@@ -974,20 +974,17 @@
   searching for changes
   taking quick initial sample
   searching: 2 queries
-  query 2; still undecided: 1240, sample size is: 100
+  query 2; still undecided: 1080, sample size is: 100
   sampling from both directions
   searching: 3 queries
-  query 3; still undecided: 1140, sample size is: 200
+  query 3; still undecided: 980, sample size is: 200
   sampling from both directions
   searching: 4 queries
   query 4; still undecided: \d+, sample size is: 200 (re)
   sampling from both directions
   searching: 5 queries
-  query 5; still undecided: \d+, sample size is: 200 (re)
-  sampling from both directions
-  searching: 6 queries
-  query 6; still undecided: \d+, sample size is: \d+ (re)
-  6 total queries in *.????s (glob)
+  query 5; still undecided: 195, sample size is: 195
+  5 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
   heads summary:
     total common heads:          1
@@ -1116,6 +1113,6 @@
   $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
   * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
   * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
-  * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 2 roundtrips in *.????s (glob)
+  * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 1 roundtrips in *.????s (glob)
   * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
   $ cd ..
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-share-bookmarks.t	Mon May 20 11:40:47 2019 -0400
@@ -0,0 +1,281 @@
+#testcases vfs svfs
+
+  $ echo "[extensions]"      >> $HGRCPATH
+  $ echo "share = "          >> $HGRCPATH
+
+#if svfs
+  $ echo "[format]"                  >> $HGRCPATH
+  $ echo "bookmarks-in-store = yes " >> $HGRCPATH
+#endif
+
+prepare repo1
+
+  $ hg init repo1
+  $ cd repo1
+  $ echo a > a
+  $ hg commit -A -m'init'
+  adding a
+  $ echo a >> a
+  $ hg commit -m'change in shared clone'
+  $ echo b > b
+  $ hg commit -A -m'another file'
+  adding b
+
+share it
+
+  $ cd ..
+  $ hg share repo1 repo2
+  updating working directory
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+test sharing bookmarks
+
+  $ hg share -B repo1 repo3
+  updating working directory
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd repo1
+  $ hg bookmark bm1
+  $ hg bookmarks
+   * bm1                       2:c2e0ac586386
+  $ cd ../repo2
+  $ hg book bm2
+  $ hg bookmarks
+     bm1                       2:c2e0ac586386 (svfs !)
+   * bm2                       2:c2e0ac586386
+  $ cd ../repo3
+  $ hg bookmarks
+     bm1                       2:c2e0ac586386
+     bm2                       2:c2e0ac586386 (svfs !)
+  $ hg book bm3
+  $ hg bookmarks
+     bm1                       2:c2e0ac586386
+     bm2                       2:c2e0ac586386 (svfs !)
+   * bm3                       2:c2e0ac586386
+  $ cd ../repo1
+  $ hg bookmarks
+   * bm1                       2:c2e0ac586386
+     bm2                       2:c2e0ac586386 (svfs !)
+     bm3                       2:c2e0ac586386
+
+check whether HG_PENDING makes pending changes only in related
+repositories visible to an external hook.
+
+In the "hg share" case, another transaction can't run in other
+repositories sharing the same source repository, because starting
+a transaction requires locking the store of the source repository.
+
+Therefore, this test scenario skips checking the visibility of
+.hg/bookmarks.pending in repo2, which shares repo1 without bookmarks.
+
+  $ cat > $TESTTMP/checkbookmarks.sh <<EOF
+  > echo "@repo1"
+  > hg -R "$TESTTMP/repo1" bookmarks
+  > echo "@repo2"
+  > hg -R "$TESTTMP/repo2" bookmarks
+  > echo "@repo3"
+  > hg -R "$TESTTMP/repo3" bookmarks
+  > exit 1 # to avoid adding new bookmark for subsequent tests
+  > EOF
+
+  $ cd ../repo1
+  $ hg --config hooks.pretxnclose="sh $TESTTMP/checkbookmarks.sh" -q book bmX
+  @repo1
+     bm1                       2:c2e0ac586386
+     bm2                       2:c2e0ac586386 (svfs !)
+     bm3                       2:c2e0ac586386
+   * bmX                       2:c2e0ac586386
+  @repo2
+     bm1                       2:c2e0ac586386 (svfs !)
+   * bm2                       2:c2e0ac586386
+     bm3                       2:c2e0ac586386 (svfs !)
+  @repo3
+     bm1                       2:c2e0ac586386
+     bm2                       2:c2e0ac586386 (svfs !)
+   * bm3                       2:c2e0ac586386
+     bmX                       2:c2e0ac586386 (vfs !)
+  transaction abort!
+  rollback completed
+  abort: pretxnclose hook exited with status 1
+  [255]
+  $ hg book bm1
+
+FYI, in contrast to the above test, bmX is invisible in repo1 (= shared
+src), because (1) HG_PENDING refers only to repo3 and (2)
+"bookmarks.pending" is written only into repo3.
+
+  $ cd ../repo3
+  $ hg --config hooks.pretxnclose="sh $TESTTMP/checkbookmarks.sh" -q book bmX
+  @repo1
+   * bm1                       2:c2e0ac586386
+     bm2                       2:c2e0ac586386 (svfs !)
+     bm3                       2:c2e0ac586386
+  @repo2
+     bm1                       2:c2e0ac586386 (svfs !)
+   * bm2                       2:c2e0ac586386
+     bm3                       2:c2e0ac586386 (svfs !)
+  @repo3
+     bm1                       2:c2e0ac586386
+     bm2                       2:c2e0ac586386 (svfs !)
+     bm3                       2:c2e0ac586386
+   * bmX                       2:c2e0ac586386
+  transaction abort!
+  rollback completed
+  abort: pretxnclose hook exited with status 1
+  [255]
+  $ hg book bm3
+
+clean up bm2 since it's uninteresting (not shared in the vfs case and
+same as bm3 in the svfs case)
+  $ cd ../repo2
+  $ hg book -d bm2
+
+  $ cd ../repo1
+
+test that commits work
+
+  $ echo 'shared bookmarks' > a
+  $ hg commit -m 'testing shared bookmarks'
+  $ hg bookmarks
+   * bm1                       3:b87954705719
+     bm3                       2:c2e0ac586386
+  $ cd ../repo3
+  $ hg bookmarks
+     bm1                       3:b87954705719
+   * bm3                       2:c2e0ac586386
+  $ echo 'more shared bookmarks' > a
+  $ hg commit -m 'testing shared bookmarks'
+  created new head
+  $ hg bookmarks
+     bm1                       3:b87954705719
+   * bm3                       4:62f4ded848e4
+  $ cd ../repo1
+  $ hg bookmarks
+   * bm1                       3:b87954705719
+     bm3                       4:62f4ded848e4
+  $ cd ..
+
+test pushing bookmarks works
+
+  $ hg clone repo3 repo4
+  updating to branch default
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd repo4
+  $ hg boo bm4
+  $ echo foo > b
+  $ hg commit -m 'foo in b'
+  $ hg boo
+     bm1                       3:b87954705719
+     bm3                       4:62f4ded848e4
+   * bm4                       5:92793bfc8cad
+  $ hg push -B bm4
+  pushing to $TESTTMP/repo3
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files
+  exporting bookmark bm4
+  $ cd ../repo1
+  $ hg bookmarks
+   * bm1                       3:b87954705719
+     bm3                       4:62f4ded848e4
+     bm4                       5:92793bfc8cad
+  $ cd ../repo3
+  $ hg bookmarks
+     bm1                       3:b87954705719
+   * bm3                       4:62f4ded848e4
+     bm4                       5:92793bfc8cad
+  $ cd ..
+
+test behavior when sharing a shared repo
+
+  $ hg share -B repo3 missingdir/repo5
+  updating working directory
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd missingdir/repo5
+  $ hg book
+     bm1                       3:b87954705719
+     bm3                       4:62f4ded848e4
+     bm4                       5:92793bfc8cad
+  $ cd ../..
+
+test what happens when an active bookmark is deleted
+
+  $ cd repo1
+  $ hg boo -d bm3
+  $ hg boo
+   * bm1                       3:b87954705719
+     bm4                       5:92793bfc8cad
+  $ cd ../repo3
+  $ hg boo
+     bm1                       3:b87954705719
+     bm4                       5:92793bfc8cad
+  $ cd ..
+
+verify that bookmarks are not written on failed transaction
+
+  $ cat > failpullbookmarks.py << EOF
+  > """A small extension that makes bookmark pulls fail, for testing"""
+  > from __future__ import absolute_import
+  > from mercurial import (
+  >   error,
+  >   exchange,
+  >   extensions,
+  > )
+  > def _pullbookmarks(orig, pullop):
+  >     orig(pullop)
+  >     raise error.HookAbort('forced failure by extension')
+  > def extsetup(ui):
+  >     extensions.wrapfunction(exchange, '_pullbookmarks', _pullbookmarks)
+  > EOF
+  $ cd repo4
+  $ hg boo
+     bm1                       3:b87954705719
+     bm3                       4:62f4ded848e4
+   * bm4                       5:92793bfc8cad
+  $ cd ../repo3
+  $ hg boo
+     bm1                       3:b87954705719
+     bm4                       5:92793bfc8cad
+  $ hg --config "extensions.failpullbookmarks=$TESTTMP/failpullbookmarks.py" pull $TESTTMP/repo4
+  pulling from $TESTTMP/repo4
+  searching for changes
+  no changes found
+  adding remote bookmark bm3
+  abort: forced failure by extension
+  [255]
+  $ hg boo
+     bm1                       3:b87954705719
+     bm4                       5:92793bfc8cad
+  $ hg pull $TESTTMP/repo4
+  pulling from $TESTTMP/repo4
+  searching for changes
+  no changes found
+  adding remote bookmark bm3
+  1 local changesets published
+  $ hg boo
+     bm1                       3:b87954705719
+   * bm3                       4:62f4ded848e4
+     bm4                       5:92793bfc8cad
+  $ cd ..
+
+verify bookmark behavior after unshare
+
+  $ cd repo3
+  $ hg unshare
+  $ hg boo
+     bm1                       3:b87954705719
+   * bm3                       4:62f4ded848e4
+     bm4                       5:92793bfc8cad
+  $ hg boo -d bm4
+  $ hg boo bm5
+  $ hg boo
+     bm1                       3:b87954705719
+     bm3                       4:62f4ded848e4
+   * bm5                       4:62f4ded848e4
+  $ cd ../repo1
+  $ hg boo
+   * bm1                       3:b87954705719
+     bm3                       4:62f4ded848e4
+     bm4                       5:92793bfc8cad
+  $ cd ..
--- a/tests/test-share.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-share.t	Mon May 20 11:40:47 2019 -0400
@@ -157,118 +157,9 @@
   $ cd ..
 
 
-test sharing bookmarks
-
-  $ hg share -B repo1 repo3
-  updating working directory
-  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  $ cd repo1
-  $ hg bookmark bm1
-  $ hg bookmarks
-   * bm1                       2:c2e0ac586386
-  $ cd ../repo2
-  $ hg book bm2
-  $ hg bookmarks
-   * bm2                       3:0e6e70d1d5f1
-  $ cd ../repo3
-  $ hg bookmarks
-     bm1                       2:c2e0ac586386
-  $ hg book bm3
-  $ hg bookmarks
-     bm1                       2:c2e0ac586386
-   * bm3                       2:c2e0ac586386
-  $ cd ../repo1
-  $ hg bookmarks
-   * bm1                       2:c2e0ac586386
-     bm3                       2:c2e0ac586386
-
-check whether HG_PENDING makes pending changes only in relatd
-repositories visible to an external hook.
-
-In "hg share" case, another transaction can't run in other
-repositories sharing same source repository, because starting
-transaction requires locking store of source repository.
-
-Therefore, this test scenario ignores checking visibility of
-.hg/bookmakrs.pending in repo2, which shares repo1 without bookmarks.
-
-  $ cat > $TESTTMP/checkbookmarks.sh <<EOF
-  > echo "@repo1"
-  > hg -R "$TESTTMP/repo1" bookmarks
-  > echo "@repo2"
-  > hg -R "$TESTTMP/repo2" bookmarks
-  > echo "@repo3"
-  > hg -R "$TESTTMP/repo3" bookmarks
-  > exit 1 # to avoid adding new bookmark for subsequent tests
-  > EOF
-
-  $ cd ../repo1
-  $ hg --config hooks.pretxnclose="sh $TESTTMP/checkbookmarks.sh" -q book bmX
-  @repo1
-     bm1                       2:c2e0ac586386
-     bm3                       2:c2e0ac586386
-   * bmX                       2:c2e0ac586386
-  @repo2
-   * bm2                       3:0e6e70d1d5f1
-  @repo3
-     bm1                       2:c2e0ac586386
-   * bm3                       2:c2e0ac586386
-     bmX                       2:c2e0ac586386
-  transaction abort!
-  rollback completed
-  abort: pretxnclose hook exited with status 1
-  [255]
-  $ hg book bm1
-
-FYI, in contrast to above test, bmX is invisible in repo1 (= shared
-src), because (1) HG_PENDING refers only repo3 and (2)
-"bookmarks.pending" is written only into repo3.
-
-  $ cd ../repo3
-  $ hg --config hooks.pretxnclose="sh $TESTTMP/checkbookmarks.sh" -q book bmX
-  @repo1
-   * bm1                       2:c2e0ac586386
-     bm3                       2:c2e0ac586386
-  @repo2
-   * bm2                       3:0e6e70d1d5f1
-  @repo3
-     bm1                       2:c2e0ac586386
-     bm3                       2:c2e0ac586386
-   * bmX                       2:c2e0ac586386
-  transaction abort!
-  rollback completed
-  abort: pretxnclose hook exited with status 1
-  [255]
-  $ hg book bm3
-
-  $ cd ../repo1
-
-test that commits work
-
-  $ echo 'shared bookmarks' > a
-  $ hg commit -m 'testing shared bookmarks'
-  $ hg bookmarks
-   * bm1                       3:b87954705719
-     bm3                       2:c2e0ac586386
-  $ cd ../repo3
-  $ hg bookmarks
-     bm1                       3:b87954705719
-   * bm3                       2:c2e0ac586386
-  $ echo 'more shared bookmarks' > a
-  $ hg commit -m 'testing shared bookmarks'
-  created new head
-  $ hg bookmarks
-     bm1                       3:b87954705719
-   * bm3                       4:62f4ded848e4
-  $ cd ../repo1
-  $ hg bookmarks
-   * bm1                       3:b87954705719
-     bm3                       4:62f4ded848e4
-  $ cd ..
-
 non largefiles repos won't enable largefiles
 
-  $ hg share --config extensions.largefiles= repo3 sharedrepo
+  $ hg share --config extensions.largefiles= repo2 sharedrepo
   The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
   The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
   updating working directory
@@ -276,132 +167,6 @@
   $ [ -f sharedrepo/.hg/hgrc ]
   [1]
 
-test pushing bookmarks works
-
-  $ hg clone repo3 repo4
-  updating to branch default
-  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  $ cd repo4
-  $ hg boo bm4
-  $ echo foo > b
-  $ hg commit -m 'foo in b'
-  $ hg boo
-     bm1                       3:b87954705719
-     bm3                       4:62f4ded848e4
-   * bm4                       5:92793bfc8cad
-  $ hg push -B bm4
-  pushing to $TESTTMP/repo3
-  searching for changes
-  adding changesets
-  adding manifests
-  adding file changes
-  added 1 changesets with 1 changes to 1 files
-  exporting bookmark bm4
-  $ cd ../repo1
-  $ hg bookmarks
-   * bm1                       3:b87954705719
-     bm3                       4:62f4ded848e4
-     bm4                       5:92793bfc8cad
-  $ cd ../repo3
-  $ hg bookmarks
-     bm1                       3:b87954705719
-   * bm3                       4:62f4ded848e4
-     bm4                       5:92793bfc8cad
-  $ cd ..
-
-test behavior when sharing a shared repo
-
-  $ hg share -B repo3 missingdir/repo5
-  updating working directory
-  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  $ cd missingdir/repo5
-  $ hg book
-     bm1                       3:b87954705719
-     bm3                       4:62f4ded848e4
-     bm4                       5:92793bfc8cad
-  $ cd ../..
-
-test what happens when an active bookmark is deleted
-
-  $ cd repo1
-  $ hg boo -d bm3
-  $ hg boo
-   * bm1                       3:b87954705719
-     bm4                       5:92793bfc8cad
-  $ cd ../repo3
-  $ hg boo
-     bm1                       3:b87954705719
-     bm4                       5:92793bfc8cad
-  $ cd ..
-
-verify that bookmarks are not written on failed transaction
-
-  $ cat > failpullbookmarks.py << EOF
-  > """A small extension that makes bookmark pulls fail, for testing"""
-  > from __future__ import absolute_import
-  > from mercurial import (
-  >   error,
-  >   exchange,
-  >   extensions,
-  > )
-  > def _pullbookmarks(orig, pullop):
-  >     orig(pullop)
-  >     raise error.HookAbort('forced failure by extension')
-  > def extsetup(ui):
-  >     extensions.wrapfunction(exchange, '_pullbookmarks', _pullbookmarks)
-  > EOF
-  $ cd repo4
-  $ hg boo
-     bm1                       3:b87954705719
-     bm3                       4:62f4ded848e4
-   * bm4                       5:92793bfc8cad
-  $ cd ../repo3
-  $ hg boo
-     bm1                       3:b87954705719
-     bm4                       5:92793bfc8cad
-  $ hg --config "extensions.failpullbookmarks=$TESTTMP/failpullbookmarks.py" pull $TESTTMP/repo4
-  pulling from $TESTTMP/repo4
-  searching for changes
-  no changes found
-  adding remote bookmark bm3
-  abort: forced failure by extension
-  [255]
-  $ hg boo
-     bm1                       3:b87954705719
-     bm4                       5:92793bfc8cad
-  $ hg pull $TESTTMP/repo4
-  pulling from $TESTTMP/repo4
-  searching for changes
-  no changes found
-  adding remote bookmark bm3
-  1 local changesets published
-  $ hg boo
-     bm1                       3:b87954705719
-   * bm3                       4:62f4ded848e4
-     bm4                       5:92793bfc8cad
-  $ cd ..
-
-verify bookmark behavior after unshare
-
-  $ cd repo3
-  $ hg unshare
-  $ hg boo
-     bm1                       3:b87954705719
-   * bm3                       4:62f4ded848e4
-     bm4                       5:92793bfc8cad
-  $ hg boo -d bm4
-  $ hg boo bm5
-  $ hg boo
-     bm1                       3:b87954705719
-     bm3                       4:62f4ded848e4
-   * bm5                       4:62f4ded848e4
-  $ cd ../repo1
-  $ hg boo
-   * bm1                       3:b87954705719
-     bm3                       4:62f4ded848e4
-     bm4                       5:92793bfc8cad
-  $ cd ..
-
 test shared clones using relative paths work
 
   $ mkdir thisdir
--- a/tests/test-subrepo.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-subrepo.t	Mon May 20 11:40:47 2019 -0400
@@ -274,7 +274,6 @@
   $ hg ci -m9
   created new head
   $ hg merge 6 --debug # test change
-    searching for copies back to rev 2
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 1f14a2e2d3ec, local: f0d2028bf86d+, remote: 1831e14459c4
@@ -301,7 +300,6 @@
   $ hg ci -m10
   committing subrepository t
   $ HGMERGE=internal:merge hg merge --debug 7 # test conflict
-    searching for copies back to rev 2
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 1831e14459c4, local: e45c8b14af55+, remote: f94576341bcf
@@ -313,7 +311,6 @@
   starting 4 threads for background file closing (?)
   (M)erge, keep (l)ocal [working copy] or keep (r)emote [merge rev]? m
   merging subrepository "t"
-    searching for copies back to rev 2
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 6747d179aa9a, local: 20a0db6fbf6c+, remote: 7af322bc1198
--- a/tests/test-tags.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-tags.t	Mon May 20 11:40:47 2019 -0400
@@ -145,7 +145,7 @@
   $ hg blackbox -l 6
   1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify
   1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> writing 48 bytes to cache/hgtagsfnodes1
-  1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> 0/1 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> 0/2 cache hits/lookups in * seconds (glob)
   1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> writing .hg/cache/tags2-visible with 1 tags
   1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify exited 0 after * seconds (glob)
   1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> blackbox -l 6
@@ -159,7 +159,7 @@
   $ hg blackbox -l 6
   1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify
   1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> not writing .hg/cache/hgtagsfnodes1 because lock cannot be acquired
-  1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> 0/1 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> 0/2 cache hits/lookups in * seconds (glob)
   1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> writing .hg/cache/tags2-visible with 1 tags
   1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify exited 0 after * seconds (glob)
   1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> blackbox -l 6
@@ -363,7 +363,7 @@
   $ hg blackbox -l 6
   1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> tags
   1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> writing 24 bytes to cache/hgtagsfnodes1
-  1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> 2/3 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> 3/4 cache hits/lookups in * seconds (glob)
   1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> writing .hg/cache/tags2-visible with 1 tags
   1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> tags exited 0 after * seconds (glob)
   1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> blackbox -l 6
@@ -384,7 +384,7 @@
   $ hg blackbox -l 6
   1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags
   1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> couldn't write cache/hgtagsfnodes1: [Errno *] * (glob)
-  1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 2/3 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 3/4 cache hits/lookups in * seconds (glob)
   1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing .hg/cache/tags2-visible with 1 tags
   1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags exited 0 after * seconds (glob)
   1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> blackbox -l 6
@@ -399,7 +399,7 @@
   $ hg blackbox -l 6
   1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags
   1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing 24 bytes to cache/hgtagsfnodes1
-  1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 2/3 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 3/4 cache hits/lookups in * seconds (glob)
   1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing .hg/cache/tags2-visible with 1 tags
   1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags exited 0 after * seconds (glob)
   1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> blackbox -l 6
@@ -427,7 +427,7 @@
 
   $ hg blackbox -l 5
   1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> writing 24 bytes to cache/hgtagsfnodes1
-  1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> 2/3 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> 2/4 cache hits/lookups in * seconds (glob)
   1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> writing .hg/cache/tags2-visible with 1 tags
   1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> tags exited 0 after * seconds (glob)
   1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> blackbox -l 5
@@ -445,7 +445,7 @@
   $ hg blackbox -l 6
   1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> tags
   1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> writing 24 bytes to cache/hgtagsfnodes1
-  1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> 2/3 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> 3/4 cache hits/lookups in * seconds (glob)
   1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> writing .hg/cache/tags2-visible with 1 tags
   1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> tags exited 0 after * seconds (glob)
   1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> blackbox -l 6
--- a/tests/test-up-local-change.t	Mon May 20 10:08:28 2019 +0200
+++ b/tests/test-up-local-change.t	Mon May 20 11:40:47 2019 -0400
@@ -40,7 +40,6 @@
   summary:     1
   
   $ hg --debug up
-    searching for copies back to rev 1
     unmatched files in other:
      b
   resolving manifests
@@ -68,9 +67,6 @@
   
   $ hg --debug up 0
   starting 4 threads for background file closing (?)
-    searching for copies back to rev 0
-    unmatched files in local (from topological common ancestor):
-     b
   resolving manifests
    branchmerge: False, force: False, partial: False
    ancestor: 1e71731e6fbb, local: 1e71731e6fbb+, remote: c19d34741b0a
@@ -95,7 +91,6 @@
   summary:     1
   
   $ hg --debug up
-    searching for copies back to rev 1
     unmatched files in other:
      b
   resolving manifests