--- a/contrib/all-revsets.txt Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/all-revsets.txt Mon Jul 22 14:00:33 2019 -0400
@@ -154,3 +154,6 @@
roots(matching(tip, "author"))
roots(matching(tip, "author")) and -10000:-1
(-10000:-1) and roots(matching(tip, "author"))
+only(max(head()))
+only(max(head()), min(head()))
+only(max(head()), limit(head(), 1, 1))
--- a/contrib/automation/README.rst Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/automation/README.rst Mon Jul 22 14:00:33 2019 -0400
@@ -101,9 +101,14 @@
* Storage costs for AMI / EBS snapshots. This should be just a few pennies
per month.
-When running EC2 instances, you'll be billed accordingly. By default, we
-use *small* instances, like ``t3.medium``. This instance type costs ~$0.07 per
-hour.
+When running EC2 instances, you'll be billed accordingly. Default instance
+types vary by operation. We try to be respectful of your money when choosing
+defaults. e.g. for Windows instances, which are billed per hour, we use
+``t3.medium`` instances, which cost ~$0.07 per hour. For operations that
+scale well to many CPUs like running Linux tests, we may use a more powerful
+instance like ``c5.9xlarge``. However, since Linux instances are billed
+per second and the cost of running e.g. a ``c5.9xlarge`` for half the time
+of a ``c5.4xlarge`` is roughly the same, the choice is justified.
.. note::
@@ -125,3 +130,54 @@
To purge all EC2 resources that we manage::
$ automation.py purge-ec2-resources
+
+Remote Machine Interfaces
+=========================
+
+The code that connects to a remote machine and executes things is
+theoretically machine agnostic as long as the remote machine conforms to
+an *interface*. In other words, to perform actions like running tests
+remotely or triggering packaging, it shouldn't matter if the remote machine
+is an EC2 instance, a virtual machine, etc. This section attempts to document
+the interface that remote machines need to provide in order to be valid
+*targets* for remote execution. These interfaces are often not ideal nor
+the most flexible. Instead, they have often evolved as the requirements of
+our automation code have evolved.
+
+Linux
+-----
+
+Remote Linux machines expose an SSH server on port 22. The SSH server
+must allow the ``hg`` user to authenticate using the SSH key generated by
+the automation code. The ``hg`` user should be part of the ``hg`` group
+and it should have ``sudo`` access without password prompting.
+
+The SSH channel must support SFTP to facilitate transferring files from
+client to server.
+
+``/bin/bash`` must be executable and point to a bash shell executable.
+
+The ``/hgdev`` directory must exist and all its content owned by ``hg::hg``.
+
+The ``/hgdev/pyenv`` directory should contain an installation of
+``pyenv``. Various Python distributions should be installed. The exact
+versions shouldn't matter. ``pyenv global`` should have been run so
+``/hgdev/pyenv/shims/`` is populated with redirector scripts that point
+to the appropriate Python executable.
+
+The ``/hgdev/venv-bootstrap`` directory must contain a virtualenv
+with Mercurial installed. The ``/hgdev/venv-bootstrap/bin/hg`` executable
+is referenced by various scripts and the client.
+
+The ``/hgdev/src`` directory MUST contain a clone of the Mercurial
+source code. The state of the working directory is not important.
+
+In order to run tests, the ``/hgwork`` directory will be created.
+This may require running various ``mkfs.*`` executables and ``mount``
+to provision a new filesystem. This will require elevated privileges
+via ``sudo``.
+
+Various dependencies to run the Mercurial test harness are also required.
+Documenting them is beyond the scope of this document. Various tests
+also require other optional dependencies and missing dependencies will
+be printed by the test runner when a test is skipped.
--- a/contrib/automation/hgautomation/__init__.py Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/automation/hgautomation/__init__.py Mon Jul 22 14:00:33 2019 -0400
@@ -53,7 +53,7 @@
return password
- def aws_connection(self, region: str):
+ def aws_connection(self, region: str, ensure_ec2_state: bool=True):
"""Obtain an AWSConnection instance bound to a specific region."""
- return AWSConnection(self, region)
+ return AWSConnection(self, region, ensure_ec2_state=ensure_ec2_state)
--- a/contrib/automation/hgautomation/aws.py Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/automation/hgautomation/aws.py Mon Jul 22 14:00:33 2019 -0400
@@ -19,6 +19,13 @@
import boto3
import botocore.exceptions
+from .linux import (
+ BOOTSTRAP_DEBIAN,
+)
+from .ssh import (
+ exec_command as ssh_exec_command,
+ wait_for_ssh,
+)
from .winrm import (
run_powershell,
wait_for_winrm,
@@ -31,12 +38,46 @@
'install-windows-dependencies.ps1')
+INSTANCE_TYPES_WITH_STORAGE = {
+ 'c5d',
+ 'd2',
+ 'h1',
+ 'i3',
+ 'm5ad',
+ 'm5d',
+ 'r5d',
+ 'r5ad',
+ 'x1',
+ 'z1d',
+}
+
+
+DEBIAN_ACCOUNT_ID = '379101102735'
+UBUNTU_ACCOUNT_ID = '099720109477'
+
+
KEY_PAIRS = {
'automation',
}
SECURITY_GROUPS = {
+ 'linux-dev-1': {
+ 'description': 'Mercurial Linux instances that perform build/test automation',
+ 'ingress': [
+ {
+ 'FromPort': 22,
+ 'ToPort': 22,
+ 'IpProtocol': 'tcp',
+ 'IpRanges': [
+ {
+ 'CidrIp': '0.0.0.0/0',
+ 'Description': 'SSH from entire Internet',
+ },
+ ],
+ },
+ ],
+ },
'windows-dev-1': {
'description': 'Mercurial Windows instances that perform build automation',
'ingress': [
@@ -180,7 +221,7 @@
class AWSConnection:
"""Manages the state of a connection with AWS."""
- def __init__(self, automation, region: str):
+ def __init__(self, automation, region: str, ensure_ec2_state: bool=True):
self.automation = automation
self.local_state_path = automation.state_path
@@ -191,11 +232,12 @@
self.ec2resource = self.session.resource('ec2')
self.iamclient = self.session.client('iam')
self.iamresource = self.session.resource('iam')
-
- ensure_key_pairs(automation.state_path, self.ec2resource)
+ self.security_groups = {}
- self.security_groups = ensure_security_groups(self.ec2resource)
- ensure_iam_state(self.iamresource)
+ if ensure_ec2_state:
+ ensure_key_pairs(automation.state_path, self.ec2resource)
+ self.security_groups = ensure_security_groups(self.ec2resource)
+ ensure_iam_state(self.iamclient, self.iamresource)
def key_pair_path_private(self, name):
"""Path to a key pair private key file."""
@@ -324,7 +366,7 @@
profile.delete()
-def ensure_iam_state(iamresource, prefix='hg-'):
+def ensure_iam_state(iamclient, iamresource, prefix='hg-'):
"""Ensure IAM state is in sync with our canonical definition."""
remote_profiles = {}
@@ -360,6 +402,10 @@
InstanceProfileName=actual)
remote_profiles[name] = profile
+ waiter = iamclient.get_waiter('instance_profile_exists')
+ waiter.wait(InstanceProfileName=actual)
+ print('IAM instance profile %s is available' % actual)
+
for name in sorted(set(IAM_ROLES) - set(remote_roles)):
entry = IAM_ROLES[name]
@@ -372,6 +418,10 @@
AssumeRolePolicyDocument=ASSUME_ROLE_POLICY_DOCUMENT,
)
+ waiter = iamclient.get_waiter('role_exists')
+ waiter.wait(RoleName=actual)
+ print('IAM role %s is available' % actual)
+
remote_roles[name] = role
for arn in entry['policy_arns']:
@@ -393,14 +443,14 @@
profile.add_role(RoleName=role)
-def find_windows_server_2019_image(ec2resource):
- """Find the Amazon published Windows Server 2019 base image."""
+def find_image(ec2resource, owner_id, name):
+ """Find an AMI by its owner ID and name."""
images = ec2resource.images.filter(
Filters=[
{
- 'Name': 'owner-alias',
- 'Values': ['amazon'],
+ 'Name': 'owner-id',
+ 'Values': [owner_id],
},
{
'Name': 'state',
@@ -412,14 +462,14 @@
},
{
'Name': 'name',
- 'Values': ['Windows_Server-2019-English-Full-Base-2019.02.13'],
+ 'Values': [name],
},
])
for image in images:
return image
- raise Exception('unable to find Windows Server 2019 image')
+ raise Exception('unable to find image for %s' % name)
def ensure_security_groups(ec2resource, prefix='hg-'):
@@ -490,7 +540,7 @@
terminate_ec2_instances(ec2resource, prefix=prefix)
- for image in ec2resource.images.all():
+ for image in ec2resource.images.filter(Owners=['self']):
if image.name.startswith(prefix):
remove_ami(ec2resource, image)
@@ -505,6 +555,10 @@
for role in iamresource.roles.all():
if role.name.startswith(prefix):
+ for p in role.attached_policies.all():
+ print('detaching policy %s from %s' % (p.arn, role.name))
+ role.detach_policy(PolicyArn=p.arn)
+
print('removing role %s' % role.name)
role.delete()
@@ -671,6 +725,309 @@
yield instances
+def resolve_fingerprint(fingerprint):
+ fingerprint = json.dumps(fingerprint, sort_keys=True)
+ return hashlib.sha256(fingerprint.encode('utf-8')).hexdigest()
+
+
+def find_and_reconcile_image(ec2resource, name, fingerprint):
+ """Attempt to find an existing EC2 AMI with a name and fingerprint.
+
+ If an image with the specified fingerprint is found, it is returned.
+ Otherwise None is returned.
+
+ Existing images for the specified name that don't have the specified
+ fingerprint or are missing required metadata are deleted.
+ """
+ # Find existing AMIs with this name and delete the ones that are invalid.
+ # Store a reference to a good image so it can be returned once the
+ # image state is reconciled.
+ images = ec2resource.images.filter(
+ Filters=[{'Name': 'name', 'Values': [name]}])
+
+ existing_image = None
+
+ for image in images:
+ if image.tags is None:
+ print('image %s for %s lacks required tags; removing' % (
+ image.id, image.name))
+ remove_ami(ec2resource, image)
+ else:
+ tags = {t['Key']: t['Value'] for t in image.tags}
+
+ if tags.get('HGIMAGEFINGERPRINT') == fingerprint:
+ existing_image = image
+ else:
+ print('image %s for %s has wrong fingerprint; removing' % (
+ image.id, image.name))
+ remove_ami(ec2resource, image)
+
+ return existing_image
+
+
+def create_ami_from_instance(ec2client, instance, name, description,
+ fingerprint):
+ """Create an AMI from a running instance.
+
+ Returns the ``ec2resource.Image`` representing the created AMI.
+ """
+ instance.stop()
+
+ ec2client.get_waiter('instance_stopped').wait(
+ InstanceIds=[instance.id],
+ WaiterConfig={
+ 'Delay': 5,
+ })
+ print('%s is stopped' % instance.id)
+
+ image = instance.create_image(
+ Name=name,
+ Description=description,
+ )
+
+ image.create_tags(Tags=[
+ {
+ 'Key': 'HGIMAGEFINGERPRINT',
+ 'Value': fingerprint,
+ },
+ ])
+
+ print('waiting for image %s' % image.id)
+
+ ec2client.get_waiter('image_available').wait(
+ ImageIds=[image.id],
+ )
+
+ print('image %s available as %s' % (image.id, image.name))
+
+ return image
+
+
+def ensure_linux_dev_ami(c: AWSConnection, distro='debian9', prefix='hg-'):
+ """Ensures a Linux development AMI is available and up-to-date.
+
+ Returns an ``ec2.Image`` of either an existing AMI or a newly-built one.
+ """
+ ec2client = c.ec2client
+ ec2resource = c.ec2resource
+
+ name = '%s%s-%s' % (prefix, 'linux-dev', distro)
+
+ if distro == 'debian9':
+ image = find_image(
+ ec2resource,
+ DEBIAN_ACCOUNT_ID,
+ 'debian-stretch-hvm-x86_64-gp2-2019-02-19-26620',
+ )
+ ssh_username = 'admin'
+ elif distro == 'ubuntu18.04':
+ image = find_image(
+ ec2resource,
+ UBUNTU_ACCOUNT_ID,
+ 'ubuntu/images/hvm-ssd/ubuntu-bionic-18.04-amd64-server-20190403',
+ )
+ ssh_username = 'ubuntu'
+ elif distro == 'ubuntu18.10':
+ image = find_image(
+ ec2resource,
+ UBUNTU_ACCOUNT_ID,
+ 'ubuntu/images/hvm-ssd/ubuntu-cosmic-18.10-amd64-server-20190402',
+ )
+ ssh_username = 'ubuntu'
+ elif distro == 'ubuntu19.04':
+ image = find_image(
+ ec2resource,
+ UBUNTU_ACCOUNT_ID,
+ 'ubuntu/images/hvm-ssd/ubuntu-disco-19.04-amd64-server-20190417',
+ )
+ ssh_username = 'ubuntu'
+ else:
+ raise ValueError('unsupported Linux distro: %s' % distro)
+
+ config = {
+ 'BlockDeviceMappings': [
+ {
+ 'DeviceName': image.block_device_mappings[0]['DeviceName'],
+ 'Ebs': {
+ 'DeleteOnTermination': True,
+ 'VolumeSize': 8,
+ 'VolumeType': 'gp2',
+ },
+ },
+ ],
+ 'EbsOptimized': True,
+ 'ImageId': image.id,
+ 'InstanceInitiatedShutdownBehavior': 'stop',
+ # 8 VCPUs for compiling Python.
+ 'InstanceType': 't3.2xlarge',
+ 'KeyName': '%sautomation' % prefix,
+ 'MaxCount': 1,
+ 'MinCount': 1,
+ 'SecurityGroupIds': [c.security_groups['linux-dev-1'].id],
+ }
+
+ requirements2_path = (pathlib.Path(__file__).parent.parent /
+ 'linux-requirements-py2.txt')
+ requirements3_path = (pathlib.Path(__file__).parent.parent /
+ 'linux-requirements-py3.txt')
+ with requirements2_path.open('r', encoding='utf-8') as fh:
+ requirements2 = fh.read()
+ with requirements3_path.open('r', encoding='utf-8') as fh:
+ requirements3 = fh.read()
+
+ # Compute a deterministic fingerprint to determine whether image needs to
+ # be regenerated.
+ fingerprint = resolve_fingerprint({
+ 'instance_config': config,
+ 'bootstrap_script': BOOTSTRAP_DEBIAN,
+ 'requirements_py2': requirements2,
+ 'requirements_py3': requirements3,
+ })
+
+ existing_image = find_and_reconcile_image(ec2resource, name, fingerprint)
+
+ if existing_image:
+ return existing_image
+
+ print('no suitable %s image found; creating one...' % name)
+
+ with temporary_ec2_instances(ec2resource, config) as instances:
+ wait_for_ip_addresses(instances)
+
+ instance = instances[0]
+
+ client = wait_for_ssh(
+ instance.public_ip_address, 22,
+ username=ssh_username,
+ key_filename=str(c.key_pair_path_private('automation')))
+
+ home = '/home/%s' % ssh_username
+
+ with client:
+ print('connecting to SSH server')
+ sftp = client.open_sftp()
+
+ print('uploading bootstrap files')
+ with sftp.open('%s/bootstrap' % home, 'wb') as fh:
+ fh.write(BOOTSTRAP_DEBIAN)
+ fh.chmod(0o0700)
+
+ with sftp.open('%s/requirements-py2.txt' % home, 'wb') as fh:
+ fh.write(requirements2)
+ fh.chmod(0o0700)
+
+ with sftp.open('%s/requirements-py3.txt' % home, 'wb') as fh:
+ fh.write(requirements3)
+ fh.chmod(0o0700)
+
+ print('executing bootstrap')
+ chan, stdin, stdout = ssh_exec_command(client,
+ '%s/bootstrap' % home)
+ stdin.close()
+
+ for line in stdout:
+ print(line, end='')
+
+ res = chan.recv_exit_status()
+ if res:
+ raise Exception('non-0 exit from bootstrap: %d' % res)
+
+ print('bootstrap completed; stopping %s to create %s' % (
+ instance.id, name))
+
+ return create_ami_from_instance(ec2client, instance, name,
+ 'Mercurial Linux development environment',
+ fingerprint)
+
+
+@contextlib.contextmanager
+def temporary_linux_dev_instances(c: AWSConnection, image, instance_type,
+ prefix='hg-', ensure_extra_volume=False):
+ """Create temporary Linux development EC2 instances.
+
+ Context manager resolves to a list of ``ec2.Instance`` that were created
+ and are running.
+
+ ``ensure_extra_volume`` can be set to ``True`` to require that instances
+ have a 2nd storage volume available other than the primary AMI volume.
+ For instance types with instance storage, this does nothing special.
+ But for instance types without instance storage, an additional EBS volume
+ will be added to the instance.
+
+ Instances have an ``ssh_client`` attribute containing a paramiko SSHClient
+ instance bound to the instance.
+
+ Instances have an ``ssh_private_key_path`` attribute containing the
+ str path to the SSH private key to connect to the instance.
+ """
+
+ block_device_mappings = [
+ {
+ 'DeviceName': image.block_device_mappings[0]['DeviceName'],
+ 'Ebs': {
+ 'DeleteOnTermination': True,
+ 'VolumeSize': 8,
+ 'VolumeType': 'gp2',
+ },
+ }
+ ]
+
+ # This is not an exhaustive list of instance types having instance storage.
+ # But it should cover the instance types this code launches.
+ if (ensure_extra_volume
+ and not instance_type.startswith(tuple(INSTANCE_TYPES_WITH_STORAGE))):
+ main_device = block_device_mappings[0]['DeviceName']
+
+ if main_device == 'xvda':
+ second_device = 'xvdb'
+ elif main_device == '/dev/sda1':
+ second_device = '/dev/sdb'
+ else:
+ raise ValueError('unhandled primary EBS device name: %s' %
+ main_device)
+
+ block_device_mappings.append({
+ 'DeviceName': second_device,
+ 'Ebs': {
+ 'DeleteOnTermination': True,
+ 'VolumeSize': 8,
+ 'VolumeType': 'gp2',
+ }
+ })
+
+ config = {
+ 'BlockDeviceMappings': block_device_mappings,
+ 'EbsOptimized': True,
+ 'ImageId': image.id,
+ 'InstanceInitiatedShutdownBehavior': 'terminate',
+ 'InstanceType': instance_type,
+ 'KeyName': '%sautomation' % prefix,
+ 'MaxCount': 1,
+ 'MinCount': 1,
+ 'SecurityGroupIds': [c.security_groups['linux-dev-1'].id],
+ }
+
+ with temporary_ec2_instances(c.ec2resource, config) as instances:
+ wait_for_ip_addresses(instances)
+
+ ssh_private_key_path = str(c.key_pair_path_private('automation'))
+
+ for instance in instances:
+ client = wait_for_ssh(
+ instance.public_ip_address, 22,
+ username='hg',
+ key_filename=ssh_private_key_path)
+
+ instance.ssh_client = client
+ instance.ssh_private_key_path = ssh_private_key_path
+
+ try:
+ yield instances
+ finally:
+ for instance in instances:
+ instance.ssh_client.close()
+
+
def ensure_windows_dev_ami(c: AWSConnection, prefix='hg-'):
"""Ensure Windows Development AMI is available and up-to-date.
@@ -689,6 +1046,10 @@
name = '%s%s' % (prefix, 'windows-dev')
+ image = find_image(ec2resource,
+ '801119661308',
+ 'Windows_Server-2019-English-Full-Base-2019.02.13')
+
config = {
'BlockDeviceMappings': [
{
@@ -700,7 +1061,7 @@
},
}
],
- 'ImageId': find_windows_server_2019_image(ec2resource).id,
+ 'ImageId': image.id,
'InstanceInitiatedShutdownBehavior': 'stop',
'InstanceType': 't3.medium',
'KeyName': '%sautomation' % prefix,
@@ -735,38 +1096,14 @@
# Compute a deterministic fingerprint to determine whether image needs
# to be regenerated.
- fingerprint = {
+ fingerprint = resolve_fingerprint({
'instance_config': config,
'user_data': WINDOWS_USER_DATA,
'initial_bootstrap': WINDOWS_BOOTSTRAP_POWERSHELL,
'bootstrap_commands': commands,
- }
-
- fingerprint = json.dumps(fingerprint, sort_keys=True)
- fingerprint = hashlib.sha256(fingerprint.encode('utf-8')).hexdigest()
-
- # Find existing AMIs with this name and delete the ones that are invalid.
- # Store a reference to a good image so it can be returned one the
- # image state is reconciled.
- images = ec2resource.images.filter(
- Filters=[{'Name': 'name', 'Values': [name]}])
-
- existing_image = None
+ })
- for image in images:
- if image.tags is None:
- print('image %s for %s lacks required tags; removing' % (
- image.id, image.name))
- remove_ami(ec2resource, image)
- else:
- tags = {t['Key']: t['Value'] for t in image.tags}
-
- if tags.get('HGIMAGEFINGERPRINT') == fingerprint:
- existing_image = image
- else:
- print('image %s for %s has wrong fingerprint; removing' % (
- image.id, image.name))
- remove_ami(ec2resource, image)
+ existing_image = find_and_reconcile_image(ec2resource, name, fingerprint)
if existing_image:
return existing_image
@@ -795,10 +1132,26 @@
)
# Reboot so all updates are fully applied.
+ #
+ # We don't use instance.reboot() here because it is asynchronous and
+ # we don't know when exactly the instance has rebooted. It could take
+ # a while to stop and we may start trying to interact with the instance
+ # before it has rebooted.
print('rebooting instance %s' % instance.id)
- ec2client.reboot_instances(InstanceIds=[instance.id])
+ instance.stop()
+ ec2client.get_waiter('instance_stopped').wait(
+ InstanceIds=[instance.id],
+ WaiterConfig={
+ 'Delay': 5,
+ })
- time.sleep(15)
+ instance.start()
+ wait_for_ip_addresses([instance])
+
+ # There is a race condition here between the User Data PS script running
+ # and us connecting to WinRM. This can manifest as
+ # "AuthorizationManager check failed" failures during run_powershell().
+ # TODO figure out a workaround.
print('waiting for Windows Remote Management to come back...')
client = wait_for_winrm(instance.public_ip_address, 'Administrator',
@@ -810,36 +1163,9 @@
run_powershell(instance.winrm_client, '\n'.join(commands))
print('bootstrap completed; stopping %s to create image' % instance.id)
- instance.stop()
-
- ec2client.get_waiter('instance_stopped').wait(
- InstanceIds=[instance.id],
- WaiterConfig={
- 'Delay': 5,
- })
- print('%s is stopped' % instance.id)
-
- image = instance.create_image(
- Name=name,
- Description='Mercurial Windows development environment',
- )
-
- image.create_tags(Tags=[
- {
- 'Key': 'HGIMAGEFINGERPRINT',
- 'Value': fingerprint,
- },
- ])
-
- print('waiting for image %s' % image.id)
-
- ec2client.get_waiter('image_available').wait(
- ImageIds=[image.id],
- )
-
- print('image %s available as %s' % (image.id, image.name))
-
- return image
+ return create_ami_from_instance(ec2client, instance, name,
+ 'Mercurial Windows development environment',
+ fingerprint)
@contextlib.contextmanager
--- a/contrib/automation/hgautomation/cli.py Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/automation/hgautomation/cli.py Mon Jul 22 14:00:33 2019 -0400
@@ -8,12 +8,15 @@
# no-check-code because Python 3 native.
import argparse
+import concurrent.futures as futures
import os
import pathlib
+import time
from . import (
aws,
HGAutomation,
+ linux,
windows,
)
@@ -22,6 +25,33 @@
DIST_PATH = SOURCE_ROOT / 'dist'
+def bootstrap_linux_dev(hga: HGAutomation, aws_region, distros=None,
+ parallel=False):
+ c = hga.aws_connection(aws_region)
+
+ if distros:
+ distros = distros.split(',')
+ else:
+ distros = sorted(linux.DISTROS)
+
+ # TODO There is a wonky interaction involving KeyboardInterrupt whereby
+ # the context manager that is supposed to terminate the temporary EC2
+ # instance doesn't run. Until we fix this, make parallel building opt-in
+ # so we don't orphan instances.
+ if parallel:
+ fs = []
+
+ with futures.ThreadPoolExecutor(len(distros)) as e:
+ for distro in distros:
+ fs.append(e.submit(aws.ensure_linux_dev_ami, c, distro=distro))
+
+ for f in fs:
+ f.result()
+ else:
+ for distro in distros:
+ aws.ensure_linux_dev_ami(c, distro=distro)
+
+
def bootstrap_windows_dev(hga: HGAutomation, aws_region):
c = hga.aws_connection(aws_region)
image = aws.ensure_windows_dev_ami(c)
@@ -73,7 +103,8 @@
windows.build_wheel(instance.winrm_client, a, DIST_PATH)
-def build_all_windows_packages(hga: HGAutomation, aws_region, revision):
+def build_all_windows_packages(hga: HGAutomation, aws_region, revision,
+ version):
c = hga.aws_connection(aws_region)
image = aws.ensure_windows_dev_ami(c)
DIST_PATH.mkdir(exist_ok=True)
@@ -89,19 +120,52 @@
windows.purge_hg(winrm_client)
windows.build_wheel(winrm_client, arch, DIST_PATH)
windows.purge_hg(winrm_client)
- windows.build_inno_installer(winrm_client, arch, DIST_PATH)
+ windows.build_inno_installer(winrm_client, arch, DIST_PATH,
+ version=version)
windows.purge_hg(winrm_client)
- windows.build_wix_installer(winrm_client, arch, DIST_PATH)
+ windows.build_wix_installer(winrm_client, arch, DIST_PATH,
+ version=version)
def terminate_ec2_instances(hga: HGAutomation, aws_region):
- c = hga.aws_connection(aws_region)
+ c = hga.aws_connection(aws_region, ensure_ec2_state=False)
aws.terminate_ec2_instances(c.ec2resource)
def purge_ec2_resources(hga: HGAutomation, aws_region):
+ c = hga.aws_connection(aws_region, ensure_ec2_state=False)
+ aws.remove_resources(c)
+
+
+def run_tests_linux(hga: HGAutomation, aws_region, instance_type,
+ python_version, test_flags, distro, filesystem):
c = hga.aws_connection(aws_region)
- aws.remove_resources(c)
+ image = aws.ensure_linux_dev_ami(c, distro=distro)
+
+ t_start = time.time()
+
+ ensure_extra_volume = filesystem not in ('default', 'tmpfs')
+
+ with aws.temporary_linux_dev_instances(
+ c, image, instance_type,
+ ensure_extra_volume=ensure_extra_volume) as insts:
+
+ instance = insts[0]
+
+ linux.prepare_exec_environment(instance.ssh_client,
+ filesystem=filesystem)
+ linux.synchronize_hg(SOURCE_ROOT, instance, '.')
+ t_prepared = time.time()
+ linux.run_tests(instance.ssh_client, python_version,
+ test_flags)
+ t_done = time.time()
+
+ t_setup = t_prepared - t_start
+ t_all = t_done - t_start
+
+ print(
+ 'total time: %.1fs; setup: %.1fs; tests: %.1fs; setup overhead: %.1f%%'
+ % (t_all, t_setup, t_done - t_prepared, t_setup / t_all * 100.0))
def run_tests_windows(hga: HGAutomation, aws_region, instance_type,
@@ -135,6 +199,21 @@
subparsers = parser.add_subparsers()
sp = subparsers.add_parser(
+ 'bootstrap-linux-dev',
+ help='Bootstrap Linux development environments',
+ )
+ sp.add_argument(
+ '--distros',
+ help='Comma delimited list of distros to bootstrap',
+ )
+ sp.add_argument(
+ '--parallel',
+ action='store_true',
+ help='Generate AMIs in parallel (not CTRL-c safe)'
+ )
+ sp.set_defaults(func=bootstrap_linux_dev)
+
+ sp = subparsers.add_parser(
'bootstrap-windows-dev',
help='Bootstrap the Windows development environment',
)
@@ -149,6 +228,10 @@
help='Mercurial revision to build',
default='.',
)
+ sp.add_argument(
+ '--version',
+ help='Mercurial version string to use',
+ )
sp.set_defaults(func=build_all_windows_packages)
sp = subparsers.add_parser(
@@ -226,6 +309,41 @@
sp.set_defaults(func=purge_ec2_resources)
sp = subparsers.add_parser(
+ 'run-tests-linux',
+ help='Run tests on Linux',
+ )
+ sp.add_argument(
+ '--distro',
+ help='Linux distribution to run tests on',
+ choices=linux.DISTROS,
+ default='debian9',
+ )
+ sp.add_argument(
+ '--filesystem',
+ help='Filesystem type to use',
+ choices={'btrfs', 'default', 'ext3', 'ext4', 'jfs', 'tmpfs', 'xfs'},
+ default='default',
+ )
+ sp.add_argument(
+ '--instance-type',
+ help='EC2 instance type to use',
+ default='c5.9xlarge',
+ )
+ sp.add_argument(
+ '--python-version',
+ help='Python version to use',
+ choices={'system2', 'system3', '2.7', '3.5', '3.6', '3.7', '3.8',
+ 'pypy', 'pypy3.5', 'pypy3.6'},
+ default='system2',
+ )
+ sp.add_argument(
+ 'test_flags',
+ help='Extra command line flags to pass to run-tests.py',
+ nargs='*',
+ )
+ sp.set_defaults(func=run_tests_linux)
+
+ sp = subparsers.add_parser(
'run-tests-windows',
help='Run tests on Windows',
)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/automation/hgautomation/linux.py Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,545 @@
+# linux.py - Linux specific automation functionality
+#
+# Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+# no-check-code because Python 3 native.
+
+import os
+import pathlib
+import shlex
+import subprocess
+import tempfile
+
+from .ssh import (
+ exec_command,
+)
+
+
+# Linux distributions that are supported. These values are the valid
+# choices for the ``--distro`` CLI argument (see ``run-tests-linux`` in
+# cli.py, which uses ``choices=linux.DISTROS``).
+DISTROS = {
+    'debian9',
+    'ubuntu18.04',
+    'ubuntu18.10',
+    'ubuntu19.04',
+}
+
+# Shell script fragment that installs multiple CPython/PyPy versions via
+# a pinned pyenv checkout into /hgdev/pyenv, verifies downloaded bootstrap
+# artifacts against SHA-256 digests, and installs the pinned requirements
+# files into each interpreter. Spliced into BOOTSTRAP_DEBIAN below via
+# str.format().
+INSTALL_PYTHONS = r'''
+PYENV2_VERSIONS="2.7.16 pypy2.7-7.1.1"
+PYENV3_VERSIONS="3.5.7 3.6.8 3.7.3 3.8-dev pypy3.5-7.0.0 pypy3.6-7.1.1"
+
+git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
+pushd /hgdev/pyenv
+git checkout 3faeda67bb33e07750d1a104271369a7384ca45c
+popd
+
+export PYENV_ROOT="/hgdev/pyenv"
+export PATH="$PYENV_ROOT/bin:$PATH"
+
+# pip 19.0.3.
+PIP_SHA256=efe99298f3fbb1f56201ce6b81d2658067d2f7d7dfc2d412e0d3cacc9a397c61
+wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/fee32c376da1ff6496a798986d7939cd51e1644f/get-pip.py
+echo "${PIP_SHA256} get-pip.py" | sha256sum --check -
+
+VIRTUALENV_SHA256=984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39
+VIRTUALENV_TARBALL=virtualenv-16.4.3.tar.gz
+wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/37/db/89d6b043b22052109da35416abc3c397655e4bd3cff031446ba02b9654fa/${VIRTUALENV_TARBALL}
+echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -
+
+for v in ${PYENV2_VERSIONS}; do
+    pyenv install -v ${v}
+    ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
+    ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
+    ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
+done
+
+for v in ${PYENV3_VERSIONS}; do
+    pyenv install -v ${v}
+    ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
+    ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py3.txt
+done
+
+pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
+'''.lstrip().replace('\r\n', '\n')
+
+
+# Shell script fragment that creates /hgdev/venv-bootstrap and installs a
+# pinned, SHA-256-verified Mercurial release into it. That hg binary is
+# later used to clone the upstream repo (see BOOTSTRAP_DEBIAN) and as the
+# remote command for pushes (see synchronize_hg()). Spliced into
+# BOOTSTRAP_DEBIAN via str.format().
+BOOTSTRAP_VIRTUALENV = r'''
+/usr/bin/virtualenv /hgdev/venv-bootstrap
+
+HG_SHA256=1bdd21bb87d1e05fb5cd395d488d0e0cc2f2f90ce0fd248e31a03595da5ccb47
+HG_TARBALL=mercurial-4.9.1.tar.gz
+
+wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
+echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -
+
+/hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
+'''.lstrip().replace('\r\n', '\n')
+
+
+BOOTSTRAP_DEBIAN = r'''
+#!/bin/bash
+
+set -ex
+
+DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'`
+DEBIAN_VERSION=`cat /etc/debian_version`
+LSB_RELEASE=`lsb_release -cs`
+
+sudo /usr/sbin/groupadd hg
+sudo /usr/sbin/groupadd docker
+sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg
+sudo mkdir /home/hg/.ssh
+sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys
+sudo chown -R hg:hg /home/hg/.ssh
+sudo chmod 700 /home/hg/.ssh
+sudo chmod 600 /home/hg/.ssh/authorized_keys
+
+cat << EOF | sudo tee /etc/sudoers.d/90-hg
+hg ALL=(ALL) NOPASSWD:ALL
+EOF
+
+sudo apt-get update
+sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade
+
+# Install packages necessary to set up Docker Apt repo.
+sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \
+ apt-transport-https \
+ gnupg
+
+cat > docker-apt-key << EOF
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth
+lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh
+38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq
+L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7
+UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N
+cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht
+ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo
+vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD
+G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ
+XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj
+q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB
+tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3
+BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO
+v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd
+tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk
+jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m
+6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P
+XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc
+FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8
+g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm
+ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh
+9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5
+G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW
+FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB
+EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF
+M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx
+Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu
+w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk
+z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8
+eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb
+VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa
+1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X
+zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ
+pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7
+ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ
+BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY
+1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp
+YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI
+mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES
+KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7
+JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ
+cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0
+6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5
+U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z
+VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f
+irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk
+SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz
+QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W
+9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw
+24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe
+dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y
+Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR
+H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh
+/nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ
+M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S
+xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O
+jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG
+YT90qFF93M3v01BbxP+EIY2/9tiIPbrd
+=0YYh
+-----END PGP PUBLIC KEY BLOCK-----
+EOF
+
+sudo apt-key add docker-apt-key
+
+if [ "$DEBIAN_VERSION" = "9.8" ]; then
+cat << EOF | sudo tee -a /etc/apt/sources.list
+# Need backports for clang-format-6.0
+deb http://deb.debian.org/debian stretch-backports main
+
+# Sources are useful if we want to compile things locally.
+deb-src http://deb.debian.org/debian stretch main
+deb-src http://security.debian.org/debian-security stretch/updates main
+deb-src http://deb.debian.org/debian stretch-updates main
+deb-src http://deb.debian.org/debian stretch-backports main
+
+deb [arch=amd64] https://download.docker.com/linux/debian stretch stable
+EOF
+
+elif [ "$DISTRO" = "Ubuntu" ]; then
+cat << EOF | sudo tee -a /etc/apt/sources.list
+deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable
+EOF
+
+fi
+
+sudo apt-get update
+
+PACKAGES="\
+ btrfs-progs \
+ build-essential \
+ bzr \
+ clang-format-6.0 \
+ cvs \
+ darcs \
+ debhelper \
+ devscripts \
+ dpkg-dev \
+ dstat \
+ emacs \
+ gettext \
+ git \
+ htop \
+ iotop \
+ jfsutils \
+ libbz2-dev \
+ libexpat1-dev \
+ libffi-dev \
+ libgdbm-dev \
+ liblzma-dev \
+ libncurses5-dev \
+ libnss3-dev \
+ libreadline-dev \
+ libsqlite3-dev \
+ libssl-dev \
+ netbase \
+ ntfs-3g \
+ nvme-cli \
+ pyflakes \
+ pyflakes3 \
+ pylint \
+ pylint3 \
+ python-all-dev \
+ python-dev \
+ python-docutils \
+ python-fuzzywuzzy \
+ python-pygments \
+ python-subversion \
+ python-vcr \
+ python3-dev \
+ python3-docutils \
+ python3-fuzzywuzzy \
+ python3-pygments \
+ python3-vcr \
+ rsync \
+ sqlite3 \
+ subversion \
+ tcl-dev \
+ tk-dev \
+ tla \
+ unzip \
+ uuid-dev \
+ vim \
+ virtualenv \
+ wget \
+ xfsprogs \
+ zip \
+ zlib1g-dev"
+
+if [ "$DEBIAN_VERSION" = "9.8" ]; then
+ PACKAGES="$PACKAGES linux-perf"
+elif [ "$DISTRO" = "Ubuntu" ]; then
+ PACKAGES="$PACKAGES linux-tools-common"
+fi
+
+# Ubuntu 19.04 removes monotone.
+if [ "$LSB_RELEASE" != "disco" ]; then
+ PACKAGES="$PACKAGES monotone"
+fi
+
+# As of April 27, 2019, Docker hasn't published packages for
+# Ubuntu 19.04 yet.
+if [ "$LSB_RELEASE" != "disco" ]; then
+ PACKAGES="$PACKAGES docker-ce"
+fi
+
+sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES
+
+# Create clang-format symlink so test harness finds it.
+sudo update-alternatives --install /usr/bin/clang-format clang-format \
+ /usr/bin/clang-format-6.0 1000
+
+sudo mkdir /hgdev
+# Will be normalized to hg:hg later.
+sudo chown `whoami` /hgdev
+
+cp requirements-py2.txt /hgdev/requirements-py2.txt
+cp requirements-py3.txt /hgdev/requirements-py3.txt
+
+# Disable the pip version check because it uses the network and can
+# be annoying.
+cat << EOF | sudo tee -a /etc/pip.conf
+[global]
+disable-pip-version-check = True
+EOF
+
+{install_pythons}
+{bootstrap_virtualenv}
+
+/hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src
+
+# Mark the repo as non-publishing.
+cat >> /hgdev/src/.hg/hgrc << EOF
+[phases]
+publish = false
+EOF
+
+sudo chown -R hg:hg /hgdev
+'''.lstrip().format(
+ install_pythons=INSTALL_PYTHONS,
+ bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV
+).replace('\r\n', '\n')
+
+
+# Prepares /hgdev for operations.
+#
+# Uploaded to the instance and executed via sudo by
+# prepare_exec_environment() with the desired filesystem type as $1.
+# Creates /hgwork on either the root volume ("default"), a tmpfs, or a
+# freshly formatted extra NVMe volume, then copies the /hgdev/src clone
+# into it.
+PREPARE_HGDEV = '''
+#!/bin/bash
+
+set -e
+
+FS=$1
+
+ensure_device() {
+    if [ -z "${DEVICE}" ]; then
+        echo "could not find block device to format"
+        exit 1
+    fi
+}
+
+# Determine device to partition for extra filesystem.
+# If only 1 volume is present, it will be the root volume and
+# should be /dev/nvme0. If multiple volumes are present, the
+# root volume could be nvme0 or nvme1. Use whichever one doesn't have
+# a partition.
+if [ -e /dev/nvme1n1 ]; then
+    if [ -e /dev/nvme0n1p1 ]; then
+        DEVICE=/dev/nvme1n1
+    else
+        DEVICE=/dev/nvme0n1
+    fi
+else
+    DEVICE=
+fi
+
+sudo mkdir /hgwork
+
+if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then
+    ensure_device
+    echo "creating ${FS} filesystem on ${DEVICE}"
+fi
+
+if [ "${FS}" = "default" ]; then
+    :
+
+elif [ "${FS}" = "btrfs" ]; then
+    sudo mkfs.btrfs ${DEVICE}
+    sudo mount ${DEVICE} /hgwork
+
+elif [ "${FS}" = "ext3" ]; then
+    # lazy_journal_init speeds up filesystem creation at the expense of
+    # integrity if things crash. We are an ephemeral instance, so we don't
+    # care about integrity.
+    sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE}
+    sudo mount ${DEVICE} /hgwork
+
+elif [ "${FS}" = "ext4" ]; then
+    sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE}
+    sudo mount ${DEVICE} /hgwork
+
+elif [ "${FS}" = "jfs" ]; then
+    sudo mkfs.jfs ${DEVICE}
+    sudo mount ${DEVICE} /hgwork
+
+elif [ "${FS}" = "tmpfs" ]; then
+    echo "creating tmpfs volume in /hgwork"
+    sudo mount -t tmpfs -o size=1024M tmpfs /hgwork
+
+elif [ "${FS}" = "xfs" ]; then
+    sudo mkfs.xfs ${DEVICE}
+    sudo mount ${DEVICE} /hgwork
+
+else
+    echo "unsupported filesystem: ${FS}"
+    exit 1
+fi
+
+echo "/hgwork ready"
+
+sudo chown hg:hg /hgwork
+mkdir /hgwork/tmp
+chown hg:hg /hgwork/tmp
+
+rsync -a /hgdev/src /hgwork/
+'''.lstrip().replace('\r\n', '\n')
+
+
+# Shell script that purges the remote working directory and updates it to
+# the revision given as $1. Uploaded to /hgdev/hgup and executed by
+# synchronize_hg() after pushing.
+HG_UPDATE_CLEAN = '''
+set -ex
+
+HG=/hgdev/venv-bootstrap/bin/hg
+
+cd /hgwork/src
+${HG} --config extensions.purge= purge --all
+${HG} update -C $1
+${HG} log -r .
+'''.lstrip().replace('\r\n', '\n')
+
+
+def prepare_exec_environment(ssh_client, filesystem='default'):
+    """Prepare an EC2 instance to execute things.
+
+    The AMI has an ``/hgdev`` bootstrapped with various Python installs
+    and a clone of the Mercurial repo.
+
+    In EC2, EBS volumes launched from snapshots have wonky performance behavior.
+    Notably, blocks have to be copied on first access, which makes volume
+    I/O extremely slow on fresh volumes.
+
+    Furthermore, we may want to run operations, tests, etc on alternative
+    filesystems so we examine behavior on different filesystems.
+
+    This function is used to facilitate executing operations on alternate
+    volumes.
+
+    ``filesystem`` is one of the values understood by the PREPARE_HGDEV
+    script (``default``, ``tmpfs``, or a mkfs-able type like ``ext4``).
+
+    Raises Exception if the remote preparation script exits non-0.
+    """
+    sftp = ssh_client.open_sftp()
+
+    # Upload the preparation script and make it executable.
+    with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh:
+        fh.write(PREPARE_HGDEV)
+        fh.chmod(0o0777)
+
+    # Quote the filesystem value since it is interpolated into a shell
+    # command line.
+    command = 'sudo /hgdev/prepare-hgdev %s' % shlex.quote(filesystem)
+    chan, stdin, stdout = exec_command(ssh_client, command)
+    stdin.close()
+
+    # Stream the script's (combined stdout/stderr) output as it arrives.
+    for line in stdout:
+        print(line, end='')
+
+    res = chan.recv_exit_status()
+
+    if res:
+        # The old message said "updating working directory," a copy-paste
+        # from synchronize_hg(); this function prepares /hgwork.
+        raise Exception('non-0 exit code preparing exec environment; %d'
+                        % res)
+
+
+def synchronize_hg(source_path: pathlib.Path, ec2_instance, revision: str=None):
+    """Synchronize a local Mercurial source path to remote EC2 instance.
+
+    Pushes ``revision`` from the repository at ``source_path`` to the
+    instance's ``/hgwork/src`` repository over SSH, then purges and updates
+    the remote working directory to that revision.
+
+    ``revision`` defaults to the working directory parent (``.``) when not
+    specified.
+
+    Raises Exception if ``source_path`` is not a Mercurial repository or if
+    the remote update exits non-0.
+    """
+    # A None revision would be passed straight into the subprocess argument
+    # list below, which raises TypeError; treat it as the current revision.
+    revision = revision or '.'
+
+    with tempfile.TemporaryDirectory() as temp_dir:
+        temp_dir = pathlib.Path(temp_dir)
+
+        ssh_dir = temp_dir / '.ssh'
+        ssh_dir.mkdir()
+        ssh_dir.chmod(0o0700)
+
+        public_ip = ec2_instance.public_ip_address
+
+        ssh_config = ssh_dir / 'config'
+
+        # Write a throwaway SSH config so the push below neither touches nor
+        # depends on the user's own SSH configuration or known hosts.
+        with ssh_config.open('w', encoding='utf-8') as fh:
+            fh.write('Host %s\n' % public_ip)
+            fh.write(' User hg\n')
+            fh.write(' StrictHostKeyChecking no\n')
+            fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
+            fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path)
+
+        if not (source_path / '.hg').is_dir():
+            raise Exception('%s is not a Mercurial repository; synchronization '
+                            'not yet supported' % source_path)
+
+        env = dict(os.environ)
+        env['HGPLAIN'] = '1'
+        env['HGENCODING'] = 'utf-8'
+
+        hg_bin = source_path / 'hg'
+
+        # Resolve the symbolic revision to a full node so the push and the
+        # remote update both reference exactly the same commit.
+        res = subprocess.run(
+            ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
+            cwd=str(source_path), env=env, check=True, capture_output=True)
+
+        full_revision = res.stdout.decode('ascii')
+
+        args = [
+            'python2.7', str(hg_bin),
+            '--config', 'ui.ssh=ssh -F %s' % ssh_config,
+            '--config', 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
+            'push', '-f', '-r', full_revision,
+            'ssh://%s//hgwork/src' % public_ip,
+        ]
+
+        subprocess.run(args, cwd=str(source_path), env=env, check=True)
+
+        # TODO support synchronizing dirty working directory.
+
+        sftp = ec2_instance.ssh_client.open_sftp()
+
+        # Upload the purge-and-update script and run it against the pushed
+        # revision.
+        with sftp.open('/hgdev/hgup', 'wb') as fh:
+            fh.write(HG_UPDATE_CLEAN)
+            fh.chmod(0o0700)
+
+        chan, stdin, stdout = exec_command(
+            ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision)
+        stdin.close()
+
+        for line in stdout:
+            print(line, end='')
+
+        res = chan.recv_exit_status()
+
+        if res:
+            raise Exception('non-0 exit code updating working directory; %d'
+                            % res)
+
+
+def run_tests(ssh_client, python_version, test_flags=None):
+    """Run tests on a remote Linux machine via an SSH client.
+
+    ``python_version`` is one of the ``--python-version`` CLI choices:
+    ``system2``/``system3`` select the distro-provided interpreters; any
+    other value is resolved through the pyenv shims under /hgdev/pyenv.
+
+    ``test_flags`` is an optional list of extra arguments for run-tests.py;
+    each element is shell-quoted before interpolation.
+
+    Returns the remote command's exit status.
+    """
+    test_flags = test_flags or []
+
+    print('running tests')
+
+    if python_version == 'system2':
+        python = '/usr/bin/python2'
+    elif python_version == 'system3':
+        python = '/usr/bin/python3'
+    elif python_version.startswith('pypy'):
+        # pyenv shims for pypy are named without a "python" prefix.
+        python = '/hgdev/pyenv/shims/%s' % python_version
+    else:
+        python = '/hgdev/pyenv/shims/python%s' % python_version
+
+    test_flags = ' '.join(shlex.quote(a) for a in test_flags)
+
+    # Point TMPDIR at the /hgwork volume set up by prepare_exec_environment()
+    # so test scratch files land on the requested filesystem.
+    command = (
+        '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
+        'cd /hgwork/src/tests && %s run-tests.py %s"' % (
+            python, test_flags))
+
+    chan, stdin, stdout = exec_command(ssh_client, command)
+
+    stdin.close()
+
+    for line in stdout:
+        print(line, end='')
+
+    return chan.recv_exit_status()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/automation/hgautomation/ssh.py Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,67 @@
+# ssh.py - Interact with remote SSH servers
+#
+# Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+# no-check-code because Python 3 native.
+
+import socket
+import time
+import warnings
+
+from cryptography.utils import (
+ CryptographyDeprecationWarning,
+)
+import paramiko
+
+
+def wait_for_ssh(hostname, port, timeout=60, username=None, key_filename=None):
+    """Wait for an SSH server to start on the specified host and port.
+
+    Repeatedly attempts a connection (5 second per-attempt timeout, 1 second
+    between attempts) until one succeeds or ``timeout`` seconds elapse, in
+    which case an Exception is raised.
+
+    Returns a connected ``paramiko.SSHClient`` on success.
+    """
+    # Freshly provisioned instances have unknown host keys; accept them
+    # silently instead of consulting or updating known_hosts.
+    class IgnoreHostKeyPolicy(paramiko.MissingHostKeyPolicy):
+        def missing_host_key(self, client, hostname, key):
+            return
+
+    end_time = time.time() + timeout
+
+    # paramiko triggers a CryptographyDeprecationWarning in the cryptography
+    # package. Let's suppress it so it doesn't pollute output.
+    with warnings.catch_warnings():
+        warnings.filterwarnings('ignore',
+                                category=CryptographyDeprecationWarning)
+
+        while True:
+            client = paramiko.SSHClient()
+            client.set_missing_host_key_policy(IgnoreHostKeyPolicy())
+            try:
+                client.connect(hostname, port=port, username=username,
+                               key_filename=key_filename,
+                               timeout=5.0, allow_agent=False,
+                               look_for_keys=False)
+
+                return client
+            except socket.error:
+                # Server likely not accepting connections yet; keep retrying.
+                pass
+            except paramiko.AuthenticationException:
+                # Bad credentials won't get better with retries; abort now.
+                raise
+            except paramiko.SSHException:
+                # Other SSH-level failures are treated as transient (e.g.
+                # while the daemon is still starting) and retried.
+                pass
+
+            if time.time() >= end_time:
+                raise Exception('Timeout reached waiting for SSH')
+
+            time.sleep(1.0)
+
+
+def exec_command(client, command):
+    """exec_command wrapper that combines stderr/stdout and returns channel
+
+    Returns a ``(channel, stdin, stdout)`` tuple; stderr output is folded
+    into ``stdout``.
+    """
+    chan = client.get_transport().open_session()
+
+    # Combine stderr into stdout *before* executing the command: paramiko
+    # documents that data already buffered on stderr when combining is
+    # enabled is lost, so enabling it after exec_command() can drop early
+    # stderr output.
+    chan.set_combine_stderr(True)
+    chan.exec_command(command)
+
+    stdin = chan.makefile('wb', -1)
+    stdout = chan.makefile('r', -1)
+
+    return chan, stdin, stdout
--- a/contrib/automation/hgautomation/windows.py Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/automation/hgautomation/windows.py Mon Jul 22 14:00:33 2019 -0400
@@ -39,7 +39,7 @@
$Env:PATH = "${root}\VC\Bin;${root}\WinSDK\Bin;$Env:PATH"
$Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:INCLUDE"
$Env:LIB = "${root}\VC\Lib;${root}\WinSDK\Lib;$Env:LIB"
-$Env:LIBPATH = "${root}\VC\lib;${root}\WinSDK\Lib:$Env:LIBPATH"
+$Env:LIBPATH = "${root}\VC\lib;${root}\WinSDK\Lib;$Env:LIBPATH"
'''.lstrip()
HG_PURGE = r'''
@@ -156,6 +156,10 @@
fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
fh.write(' IdentityFile %s\n' % (ssh_dir / 'id_rsa'))
+ if not (hg_repo / '.hg').is_dir():
+ raise Exception('%s is not a Mercurial repository; '
+ 'synchronization not yet supported' % hg_repo)
+
env = dict(os.environ)
env['HGPLAIN'] = '1'
env['HGENCODING'] = 'utf-8'
@@ -172,7 +176,8 @@
'python2.7', hg_bin,
'--config', 'ui.ssh=ssh -F %s' % ssh_config,
'--config', 'ui.remotecmd=c:/hgdev/venv-bootstrap/Scripts/hg.exe',
- 'push', '-r', full_revision, 'ssh://%s/c:/hgdev/src' % public_ip,
+ 'push', '-f', '-r', full_revision,
+ 'ssh://%s/c:/hgdev/src' % public_ip,
]
subprocess.run(args, cwd=str(hg_repo), env=env, check=True)
--- a/contrib/automation/hgautomation/winrm.py Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/automation/hgautomation/winrm.py Mon Jul 22 14:00:33 2019 -0400
@@ -25,7 +25,7 @@
logger = logging.getLogger(__name__)
-def wait_for_winrm(host, username, password, timeout=120, ssl=False):
+def wait_for_winrm(host, username, password, timeout=180, ssl=False):
"""Wait for the Windows Remoting (WinRM) service to become available.
Returns a ``psrpclient.Client`` instance.
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/automation/linux-requirements-py2.txt Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,130 @@
+#
+# This file is autogenerated by pip-compile
+# To update, run:
+#
+# pip-compile -U --generate-hashes --output-file contrib/automation/linux-requirements-py2.txt contrib/automation/linux-requirements.txt.in
+#
+astroid==1.6.6 \
+ --hash=sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756 \
+ --hash=sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7 \
+ # via pylint
+backports.functools-lru-cache==1.5 \
+ --hash=sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a \
+ --hash=sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd \
+ # via astroid, isort, pylint
+bzr==2.7.0 ; python_version <= "2.7" and platform_python_implementation == "CPython" \
+ --hash=sha256:c9f6bbe0a50201dadc5fddadd94ba50174193c6cf6e39e16f6dd0ad98a1df338
+configparser==3.7.4 \
+ --hash=sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32 \
+ --hash=sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75 \
+ # via pylint
+contextlib2==0.5.5 \
+ --hash=sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48 \
+ --hash=sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00 \
+ # via vcrpy
+docutils==0.14 \
+ --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \
+ --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \
+ --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6
+enum34==1.1.6 \
+ --hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
+ --hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
+ --hash=sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79 \
+ --hash=sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1 \
+ # via astroid
+funcsigs==1.0.2 \
+ --hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \
+ --hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50 \
+ # via mock
+futures==3.2.0 \
+ --hash=sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265 \
+ --hash=sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1 \
+ # via isort
+fuzzywuzzy==0.17.0 \
+ --hash=sha256:5ac7c0b3f4658d2743aa17da53a55598144edbc5bee3c6863840636e6926f254 \
+ --hash=sha256:6f49de47db00e1c71d40ad16da42284ac357936fa9b66bea1df63fed07122d62
+isort==4.3.17 \
+ --hash=sha256:01cb7e1ca5e6c5b3f235f0385057f70558b70d2f00320208825fa62887292f43 \
+ --hash=sha256:268067462aed7eb2a1e237fcb287852f22077de3fb07964e87e00f829eea2d1a \
+ # via pylint
+lazy-object-proxy==1.3.1 \
+ --hash=sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33 \
+ --hash=sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39 \
+ --hash=sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019 \
+ --hash=sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088 \
+ --hash=sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b \
+ --hash=sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e \
+ --hash=sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6 \
+ --hash=sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b \
+ --hash=sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5 \
+ --hash=sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff \
+ --hash=sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd \
+ --hash=sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7 \
+ --hash=sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff \
+ --hash=sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d \
+ --hash=sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2 \
+ --hash=sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35 \
+ --hash=sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4 \
+ --hash=sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514 \
+ --hash=sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252 \
+ --hash=sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109 \
+ --hash=sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f \
+ --hash=sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c \
+ --hash=sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92 \
+ --hash=sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577 \
+ --hash=sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d \
+ --hash=sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d \
+ --hash=sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f \
+ --hash=sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a \
+ --hash=sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b \
+ # via astroid
+mccabe==0.6.1 \
+ --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \
+ --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \
+ # via pylint
+mock==2.0.0 \
+ --hash=sha256:5ce3c71c5545b472da17b72268978914d0252980348636840bd34a00b5cc96c1 \
+ --hash=sha256:b158b6df76edd239b8208d481dc46b6afd45a846b7812ff0ce58971cf5bc8bba \
+ # via vcrpy
+pbr==5.1.3 \
+ --hash=sha256:8257baf496c8522437e8a6cfe0f15e00aedc6c0e0e7c9d55eeeeab31e0853843 \
+ --hash=sha256:8c361cc353d988e4f5b998555c88098b9d5964c2e11acf7b0d21925a66bb5824 \
+ # via mock
+pyflakes==2.1.1 \
+ --hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \
+ --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2
+pygments==2.3.1 \
+ --hash=sha256:5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a \
+ --hash=sha256:e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d
+pylint==1.9.4 \
+ --hash=sha256:02c2b6d268695a8b64ad61847f92e611e6afcff33fd26c3a2125370c4662905d \
+ --hash=sha256:ee1e85575587c5b58ddafa25e1c1b01691ef172e139fc25585e5d3f02451da93
+python-levenshtein==0.12.0 \
+ --hash=sha256:033a11de5e3d19ea25c9302d11224e1a1898fe5abd23c61c7c360c25195e3eb1
+pyyaml==5.1 \
+ --hash=sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c \
+ --hash=sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95 \
+ --hash=sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2 \
+ --hash=sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4 \
+ --hash=sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad \
+ --hash=sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba \
+ --hash=sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1 \
+ --hash=sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e \
+ --hash=sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673 \
+ --hash=sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13 \
+ --hash=sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19 \
+ # via vcrpy
+singledispatch==3.4.0.3 \
+ --hash=sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c \
+ --hash=sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8 \
+ # via astroid, pylint
+six==1.12.0 \
+ --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
+ --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
+ # via astroid, mock, pylint, singledispatch, vcrpy
+vcrpy==2.0.1 \
+ --hash=sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3 \
+ --hash=sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f
+wrapt==1.11.1 \
+ --hash=sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533 \
+ # via astroid, vcrpy
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/automation/linux-requirements-py3.txt Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,159 @@
+#
+# This file is autogenerated by pip-compile
+# To update, run:
+#
+# pip-compile -U --generate-hashes --output-file contrib/automation/linux-requirements-py3.txt contrib/automation/linux-requirements.txt.in
+#
+astroid==2.2.5 \
+ --hash=sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4 \
+ --hash=sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4 \
+ # via pylint
+docutils==0.14 \
+ --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \
+ --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \
+ --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6
+fuzzywuzzy==0.17.0 \
+ --hash=sha256:5ac7c0b3f4658d2743aa17da53a55598144edbc5bee3c6863840636e6926f254 \
+ --hash=sha256:6f49de47db00e1c71d40ad16da42284ac357936fa9b66bea1df63fed07122d62
+idna==2.8 \
+ --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
+ --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c \
+ # via yarl
+isort==4.3.17 \
+ --hash=sha256:01cb7e1ca5e6c5b3f235f0385057f70558b70d2f00320208825fa62887292f43 \
+ --hash=sha256:268067462aed7eb2a1e237fcb287852f22077de3fb07964e87e00f829eea2d1a \
+ # via pylint
+lazy-object-proxy==1.3.1 \
+ --hash=sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33 \
+ --hash=sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39 \
+ --hash=sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019 \
+ --hash=sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088 \
+ --hash=sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b \
+ --hash=sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e \
+ --hash=sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6 \
+ --hash=sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b \
+ --hash=sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5 \
+ --hash=sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff \
+ --hash=sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd \
+ --hash=sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7 \
+ --hash=sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff \
+ --hash=sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d \
+ --hash=sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2 \
+ --hash=sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35 \
+ --hash=sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4 \
+ --hash=sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514 \
+ --hash=sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252 \
+ --hash=sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109 \
+ --hash=sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f \
+ --hash=sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c \
+ --hash=sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92 \
+ --hash=sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577 \
+ --hash=sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d \
+ --hash=sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d \
+ --hash=sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f \
+ --hash=sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a \
+ --hash=sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b \
+ # via astroid
+mccabe==0.6.1 \
+ --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \
+ --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \
+ # via pylint
+multidict==4.5.2 \
+ --hash=sha256:024b8129695a952ebd93373e45b5d341dbb87c17ce49637b34000093f243dd4f \
+ --hash=sha256:041e9442b11409be5e4fc8b6a97e4bcead758ab1e11768d1e69160bdde18acc3 \
+ --hash=sha256:045b4dd0e5f6121e6f314d81759abd2c257db4634260abcfe0d3f7083c4908ef \
+ --hash=sha256:047c0a04e382ef8bd74b0de01407e8d8632d7d1b4db6f2561106af812a68741b \
+ --hash=sha256:068167c2d7bbeebd359665ac4fff756be5ffac9cda02375b5c5a7c4777038e73 \
+ --hash=sha256:148ff60e0fffa2f5fad2eb25aae7bef23d8f3b8bdaf947a65cdbe84a978092bc \
+ --hash=sha256:1d1c77013a259971a72ddaa83b9f42c80a93ff12df6a4723be99d858fa30bee3 \
+ --hash=sha256:1d48bc124a6b7a55006d97917f695effa9725d05abe8ee78fd60d6588b8344cd \
+ --hash=sha256:31dfa2fc323097f8ad7acd41aa38d7c614dd1960ac6681745b6da124093dc351 \
+ --hash=sha256:34f82db7f80c49f38b032c5abb605c458bac997a6c3142e0d6c130be6fb2b941 \
+ --hash=sha256:3d5dd8e5998fb4ace04789d1d008e2bb532de501218519d70bb672c4c5a2fc5d \
+ --hash=sha256:4a6ae52bd3ee41ee0f3acf4c60ceb3f44e0e3bc52ab7da1c2b2aa6703363a3d1 \
+ --hash=sha256:4b02a3b2a2f01d0490dd39321c74273fed0568568ea0e7ea23e02bd1fb10a10b \
+ --hash=sha256:4b843f8e1dd6a3195679d9838eb4670222e8b8d01bc36c9894d6c3538316fa0a \
+ --hash=sha256:5de53a28f40ef3c4fd57aeab6b590c2c663de87a5af76136ced519923d3efbb3 \
+ --hash=sha256:61b2b33ede821b94fa99ce0b09c9ece049c7067a33b279f343adfe35108a4ea7 \
+ --hash=sha256:6a3a9b0f45fd75dc05d8e93dc21b18fc1670135ec9544d1ad4acbcf6b86781d0 \
+ --hash=sha256:76ad8e4c69dadbb31bad17c16baee61c0d1a4a73bed2590b741b2e1a46d3edd0 \
+ --hash=sha256:7ba19b777dc00194d1b473180d4ca89a054dd18de27d0ee2e42a103ec9b7d014 \
+ --hash=sha256:7c1b7eab7a49aa96f3db1f716f0113a8a2e93c7375dd3d5d21c4941f1405c9c5 \
+ --hash=sha256:7fc0eee3046041387cbace9314926aa48b681202f8897f8bff3809967a049036 \
+ --hash=sha256:8ccd1c5fff1aa1427100ce188557fc31f1e0a383ad8ec42c559aabd4ff08802d \
+ --hash=sha256:8e08dd76de80539d613654915a2f5196dbccc67448df291e69a88712ea21e24a \
+ --hash=sha256:c18498c50c59263841862ea0501da9f2b3659c00db54abfbf823a80787fde8ce \
+ --hash=sha256:c49db89d602c24928e68c0d510f4fcf8989d77defd01c973d6cbe27e684833b1 \
+ --hash=sha256:ce20044d0317649ddbb4e54dab3c1bcc7483c78c27d3f58ab3d0c7e6bc60d26a \
+ --hash=sha256:d1071414dd06ca2eafa90c85a079169bfeb0e5f57fd0b45d44c092546fcd6fd9 \
+ --hash=sha256:d3be11ac43ab1a3e979dac80843b42226d5d3cccd3986f2e03152720a4297cd7 \
+ --hash=sha256:db603a1c235d110c860d5f39988ebc8218ee028f07a7cbc056ba6424372ca31b \
+ # via yarl
+pyflakes==2.1.1 \
+ --hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \
+ --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2
+pygments==2.3.1 \
+ --hash=sha256:5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a \
+ --hash=sha256:e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d
+pylint==2.3.1 \
+ --hash=sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09 \
+ --hash=sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1
+python-levenshtein==0.12.0 \
+ --hash=sha256:033a11de5e3d19ea25c9302d11224e1a1898fe5abd23c61c7c360c25195e3eb1
+pyyaml==5.1 \
+ --hash=sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c \
+ --hash=sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95 \
+ --hash=sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2 \
+ --hash=sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4 \
+ --hash=sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad \
+ --hash=sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba \
+ --hash=sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1 \
+ --hash=sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e \
+ --hash=sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673 \
+ --hash=sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13 \
+ --hash=sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19 \
+ # via vcrpy
+six==1.12.0 \
+ --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
+ --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
+ # via astroid, vcrpy
+typed-ast==1.3.4 ; python_version >= "3.0" and platform_python_implementation != "PyPy" \
+ --hash=sha256:04894d268ba6eab7e093d43107869ad49e7b5ef40d1a94243ea49b352061b200 \
+ --hash=sha256:16616ece19daddc586e499a3d2f560302c11f122b9c692bc216e821ae32aa0d0 \
+ --hash=sha256:252fdae740964b2d3cdfb3f84dcb4d6247a48a6abe2579e8029ab3be3cdc026c \
+ --hash=sha256:2af80a373af123d0b9f44941a46df67ef0ff7a60f95872412a145f4500a7fc99 \
+ --hash=sha256:2c88d0a913229a06282b285f42a31e063c3bf9071ff65c5ea4c12acb6977c6a7 \
+ --hash=sha256:2ea99c029ebd4b5a308d915cc7fb95b8e1201d60b065450d5d26deb65d3f2bc1 \
+ --hash=sha256:3d2e3ab175fc097d2a51c7a0d3fda442f35ebcc93bb1d7bd9b95ad893e44c04d \
+ --hash=sha256:4766dd695548a15ee766927bf883fb90c6ac8321be5a60c141f18628fb7f8da8 \
+ --hash=sha256:56b6978798502ef66625a2e0f80cf923da64e328da8bbe16c1ff928c70c873de \
+ --hash=sha256:5cddb6f8bce14325b2863f9d5ac5c51e07b71b462361fd815d1d7706d3a9d682 \
+ --hash=sha256:644ee788222d81555af543b70a1098f2025db38eaa99226f3a75a6854924d4db \
+ --hash=sha256:64cf762049fc4775efe6b27161467e76d0ba145862802a65eefc8879086fc6f8 \
+ --hash=sha256:68c362848d9fb71d3c3e5f43c09974a0ae319144634e7a47db62f0f2a54a7fa7 \
+ --hash=sha256:6c1f3c6f6635e611d58e467bf4371883568f0de9ccc4606f17048142dec14a1f \
+ --hash=sha256:b213d4a02eec4ddf622f4d2fbc539f062af3788d1f332f028a2e19c42da53f15 \
+ --hash=sha256:bb27d4e7805a7de0e35bd0cb1411bc85f807968b2b0539597a49a23b00a622ae \
+ --hash=sha256:c9d414512eaa417aadae7758bc118868cd2396b0e6138c1dd4fda96679c079d3 \
+ --hash=sha256:f0937165d1e25477b01081c4763d2d9cdc3b18af69cb259dd4f640c9b900fe5e \
+ --hash=sha256:fb96a6e2c11059ecf84e6741a319f93f683e440e341d4489c9b161eca251cf2a \
+ --hash=sha256:fc71d2d6ae56a091a8d94f33ec9d0f2001d1cb1db423d8b4355debfe9ce689b7
+vcrpy==2.0.1 \
+ --hash=sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3 \
+ --hash=sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f
+wrapt==1.11.1 \
+ --hash=sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533 \
+ # via astroid, vcrpy
+yarl==1.3.0 \
+ --hash=sha256:024ecdc12bc02b321bc66b41327f930d1c2c543fa9a561b39861da9388ba7aa9 \
+ --hash=sha256:2f3010703295fbe1aec51023740871e64bb9664c789cba5a6bdf404e93f7568f \
+ --hash=sha256:3890ab952d508523ef4881457c4099056546593fa05e93da84c7250516e632eb \
+ --hash=sha256:3e2724eb9af5dc41648e5bb304fcf4891adc33258c6e14e2a7414ea32541e320 \
+ --hash=sha256:5badb97dd0abf26623a9982cd448ff12cb39b8e4c94032ccdedf22ce01a64842 \
+ --hash=sha256:73f447d11b530d860ca1e6b582f947688286ad16ca42256413083d13f260b7a0 \
+ --hash=sha256:7ab825726f2940c16d92aaec7d204cfc34ac26c0040da727cf8ba87255a33829 \
+ --hash=sha256:b25de84a8c20540531526dfbb0e2d2b648c13fd5dd126728c496d7c3fea33310 \
+ --hash=sha256:c6e341f5a6562af74ba55205dbd56d248daf1b5748ec48a0200ba227bb9e33f4 \
+ --hash=sha256:c9bb7c249c4432cd47e75af3864bc02d26c9594f49c82e2a28624417f0ae63b8 \
+ --hash=sha256:e060906c0c585565c718d1c3841747b61c5439af2211e185f6739a9412dfbde1 \
+ # via vcrpy
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/automation/linux-requirements.txt.in Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,12 @@
+# Bazaar doesn't work with Python 3 nor PyPy.
+bzr ; python_version <= '2.7' and platform_python_implementation == 'CPython'
+docutils
+fuzzywuzzy
+pyflakes
+pygments
+pylint
+# Needed to avoid warnings from fuzzywuzzy.
+python-Levenshtein
+# typed-ast dependency doesn't install on PyPy.
+typed-ast ; python_version >= '3.0' and platform_python_implementation != 'PyPy'
+vcrpy
--- a/contrib/automation/requirements.txt Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/automation/requirements.txt Mon Jul 22 14:00:33 2019 -0400
@@ -8,47 +8,68 @@
--hash=sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87 \
--hash=sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49 \
# via cryptography
-boto3==1.9.111 \
- --hash=sha256:06414c75d1f62af7d04fd652b38d1e4fd3cfd6b35bad978466af88e2aaecd00d \
- --hash=sha256:f3b77dff382374773d02411fa47ee408f4f503aeebd837fd9dc9ed8635bc5e8e
-botocore==1.12.111 \
- --hash=sha256:6af473c52d5e3e7ff82de5334e9fee96b2d5ec2df5d78bc00cd9937e2573a7a8 \
- --hash=sha256:9f5123c7be704b17aeacae99b5842ab17bda1f799dd29134de8c70e0a50a45d7 \
+bcrypt==3.1.6 \
+ --hash=sha256:0ba875eb67b011add6d8c5b76afbd92166e98b1f1efab9433d5dc0fafc76e203 \
+ --hash=sha256:21ed446054c93e209434148ef0b362432bb82bbdaf7beef70a32c221f3e33d1c \
+ --hash=sha256:28a0459381a8021f57230954b9e9a65bb5e3d569d2c253c5cac6cb181d71cf23 \
+ --hash=sha256:2aed3091eb6f51c26b7c2fad08d6620d1c35839e7a362f706015b41bd991125e \
+ --hash=sha256:2fa5d1e438958ea90eaedbf8082c2ceb1a684b4f6c75a3800c6ec1e18ebef96f \
+ --hash=sha256:3a73f45484e9874252002793518da060fb11eaa76c30713faa12115db17d1430 \
+ --hash=sha256:3e489787638a36bb466cd66780e15715494b6d6905ffdbaede94440d6d8e7dba \
+ --hash=sha256:44636759d222baa62806bbceb20e96f75a015a6381690d1bc2eda91c01ec02ea \
+ --hash=sha256:678c21b2fecaa72a1eded0cf12351b153615520637efcadc09ecf81b871f1596 \
+ --hash=sha256:75460c2c3786977ea9768d6c9d8957ba31b5fbeb0aae67a5c0e96aab4155f18c \
+ --hash=sha256:8ac06fb3e6aacb0a95b56eba735c0b64df49651c6ceb1ad1cf01ba75070d567f \
+ --hash=sha256:8fdced50a8b646fff8fa0e4b1c5fd940ecc844b43d1da5a980cb07f2d1b1132f \
+ --hash=sha256:9b2c5b640a2da533b0ab5f148d87fb9989bf9bcb2e61eea6a729102a6d36aef9 \
+ --hash=sha256:a9083e7fa9adb1a4de5ac15f9097eb15b04e2c8f97618f1b881af40abce382e1 \
+ --hash=sha256:b7e3948b8b1a81c5a99d41da5fb2dc03ddb93b5f96fcd3fd27e643f91efa33e1 \
+ --hash=sha256:b998b8ca979d906085f6a5d84f7b5459e5e94a13fc27c28a3514437013b6c2f6 \
+ --hash=sha256:dd08c50bc6f7be69cd7ba0769acca28c846ec46b7a8ddc2acf4b9ac6f8a7457e \
+ --hash=sha256:de5badee458544ab8125e63e39afeedfcf3aef6a6e2282ac159c95ae7472d773 \
+ --hash=sha256:ede2a87333d24f55a4a7338a6ccdccf3eaa9bed081d1737e0db4dbd1a4f7e6b6 \
+ # via paramiko
+boto3==1.9.137 \
+ --hash=sha256:882cc4869b47b51dae4b4a900769e72171ff00e0b6bca644b2d7a7ad7378f324 \
+ --hash=sha256:cd503a7e7a04f1c14d2801f9727159dfa88c393b4004e98940fa4aa205d920c8
+botocore==1.12.137 \
+ --hash=sha256:0d95794f6b1239c75e2c5f966221bcd4b68020fddb5676f757531eedbb612ed8 \
+ --hash=sha256:3213cf48cf2ceee10fc3b93221f2cd1c38521cca7584f547d5c086213cc60f35 \
# via boto3, s3transfer
certifi==2019.3.9 \
--hash=sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5 \
--hash=sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae \
# via requests
-cffi==1.12.2 \
- --hash=sha256:00b97afa72c233495560a0793cdc86c2571721b4271c0667addc83c417f3d90f \
- --hash=sha256:0ba1b0c90f2124459f6966a10c03794082a2f3985cd699d7d63c4a8dae113e11 \
- --hash=sha256:0bffb69da295a4fc3349f2ec7cbe16b8ba057b0a593a92cbe8396e535244ee9d \
- --hash=sha256:21469a2b1082088d11ccd79dd84157ba42d940064abbfa59cf5f024c19cf4891 \
- --hash=sha256:2e4812f7fa984bf1ab253a40f1f4391b604f7fc424a3e21f7de542a7f8f7aedf \
- --hash=sha256:2eac2cdd07b9049dd4e68449b90d3ef1adc7c759463af5beb53a84f1db62e36c \
- --hash=sha256:2f9089979d7456c74d21303c7851f158833d48fb265876923edcb2d0194104ed \
- --hash=sha256:3dd13feff00bddb0bd2d650cdb7338f815c1789a91a6f68fdc00e5c5ed40329b \
- --hash=sha256:4065c32b52f4b142f417af6f33a5024edc1336aa845b9d5a8d86071f6fcaac5a \
- --hash=sha256:51a4ba1256e9003a3acf508e3b4f4661bebd015b8180cc31849da222426ef585 \
- --hash=sha256:59888faac06403767c0cf8cfb3f4a777b2939b1fbd9f729299b5384f097f05ea \
- --hash=sha256:59c87886640574d8b14910840327f5cd15954e26ed0bbd4e7cef95fa5aef218f \
- --hash=sha256:610fc7d6db6c56a244c2701575f6851461753c60f73f2de89c79bbf1cc807f33 \
- --hash=sha256:70aeadeecb281ea901bf4230c6222af0248c41044d6f57401a614ea59d96d145 \
- --hash=sha256:71e1296d5e66c59cd2c0f2d72dc476d42afe02aeddc833d8e05630a0551dad7a \
- --hash=sha256:8fc7a49b440ea752cfdf1d51a586fd08d395ff7a5d555dc69e84b1939f7ddee3 \
- --hash=sha256:9b5c2afd2d6e3771d516045a6cfa11a8da9a60e3d128746a7fe9ab36dfe7221f \
- --hash=sha256:9c759051ebcb244d9d55ee791259ddd158188d15adee3c152502d3b69005e6bd \
- --hash=sha256:b4d1011fec5ec12aa7cc10c05a2f2f12dfa0adfe958e56ae38dc140614035804 \
- --hash=sha256:b4f1d6332339ecc61275bebd1f7b674098a66fea11a00c84d1c58851e618dc0d \
- --hash=sha256:c030cda3dc8e62b814831faa4eb93dd9a46498af8cd1d5c178c2de856972fd92 \
- --hash=sha256:c2e1f2012e56d61390c0e668c20c4fb0ae667c44d6f6a2eeea5d7148dcd3df9f \
- --hash=sha256:c37c77d6562074452120fc6c02ad86ec928f5710fbc435a181d69334b4de1d84 \
- --hash=sha256:c8149780c60f8fd02752d0429246088c6c04e234b895c4a42e1ea9b4de8d27fb \
- --hash=sha256:cbeeef1dc3c4299bd746b774f019de9e4672f7cc666c777cd5b409f0b746dac7 \
- --hash=sha256:e113878a446c6228669144ae8a56e268c91b7f1fafae927adc4879d9849e0ea7 \
- --hash=sha256:e21162bf941b85c0cda08224dade5def9360f53b09f9f259adb85fc7dd0e7b35 \
- --hash=sha256:fb6934ef4744becbda3143d30c6604718871495a5e36c408431bf33d9c146889 \
- # via cryptography
+cffi==1.12.3 \
+ --hash=sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774 \
+ --hash=sha256:046ef9a22f5d3eed06334d01b1e836977eeef500d9b78e9ef693f9380ad0b83d \
+ --hash=sha256:066bc4c7895c91812eff46f4b1c285220947d4aa46fa0a2651ff85f2afae9c90 \
+ --hash=sha256:066c7ff148ae33040c01058662d6752fd73fbc8e64787229ea8498c7d7f4041b \
+ --hash=sha256:2444d0c61f03dcd26dbf7600cf64354376ee579acad77aef459e34efcb438c63 \
+ --hash=sha256:300832850b8f7967e278870c5d51e3819b9aad8f0a2c8dbe39ab11f119237f45 \
+ --hash=sha256:34c77afe85b6b9e967bd8154e3855e847b70ca42043db6ad17f26899a3df1b25 \
+ --hash=sha256:46de5fa00f7ac09f020729148ff632819649b3e05a007d286242c4882f7b1dc3 \
+ --hash=sha256:4aa8ee7ba27c472d429b980c51e714a24f47ca296d53f4d7868075b175866f4b \
+ --hash=sha256:4d0004eb4351e35ed950c14c11e734182591465a33e960a4ab5e8d4f04d72647 \
+ --hash=sha256:4e3d3f31a1e202b0f5a35ba3bc4eb41e2fc2b11c1eff38b362de710bcffb5016 \
+ --hash=sha256:50bec6d35e6b1aaeb17f7c4e2b9374ebf95a8975d57863546fa83e8d31bdb8c4 \
+ --hash=sha256:55cad9a6df1e2a1d62063f79d0881a414a906a6962bc160ac968cc03ed3efcfb \
+ --hash=sha256:5662ad4e4e84f1eaa8efce5da695c5d2e229c563f9d5ce5b0113f71321bcf753 \
+ --hash=sha256:59b4dc008f98fc6ee2bb4fd7fc786a8d70000d058c2bbe2698275bc53a8d3fa7 \
+ --hash=sha256:73e1ffefe05e4ccd7bcea61af76f36077b914f92b76f95ccf00b0c1b9186f3f9 \
+ --hash=sha256:a1f0fd46eba2d71ce1589f7e50a9e2ffaeb739fb2c11e8192aa2b45d5f6cc41f \
+ --hash=sha256:a2e85dc204556657661051ff4bab75a84e968669765c8a2cd425918699c3d0e8 \
+ --hash=sha256:a5457d47dfff24882a21492e5815f891c0ca35fefae8aa742c6c263dac16ef1f \
+ --hash=sha256:a8dccd61d52a8dae4a825cdbb7735da530179fea472903eb871a5513b5abbfdc \
+ --hash=sha256:ae61af521ed676cf16ae94f30fe202781a38d7178b6b4ab622e4eec8cefaff42 \
+ --hash=sha256:b012a5edb48288f77a63dba0840c92d0504aa215612da4541b7b42d849bc83a3 \
+ --hash=sha256:d2c5cfa536227f57f97c92ac30c8109688ace8fa4ac086d19d0af47d134e2909 \
+ --hash=sha256:d42b5796e20aacc9d15e66befb7a345454eef794fdb0737d1af593447c6c8f45 \
+ --hash=sha256:dee54f5d30d775f525894d67b1495625dd9322945e7fee00731952e0368ff42d \
+ --hash=sha256:e070535507bd6aa07124258171be2ee8dfc19119c28ca94c9dfb7efd23564512 \
+ --hash=sha256:e1ff2748c84d97b065cc95429814cdba39bcbd77c9c85c89344b317dc0d9cbff \
+ --hash=sha256:ed851c75d1e0e043cbf5ca9a8e1b13c4c90f3fbd863dacb01c0808e2b5204201 \
+ # via bcrypt, cryptography, pynacl
chardet==3.0.4 \
--hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
--hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \
@@ -73,7 +94,7 @@
--hash=sha256:d4afbb0840f489b60f5a580a41a1b9c3622e08ecb5eec8614d4fb4cd914c4460 \
--hash=sha256:d9ed28030797c00f4bc43c86bf819266c76a5ea61d006cd4078a93ebf7da6bfd \
--hash=sha256:e603aa7bb52e4e8ed4119a58a03b60323918467ef209e6ff9db3ac382e5cf2c6 \
- # via pypsrp
+ # via paramiko, pypsrp
docutils==0.14 \
--hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \
--hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \
@@ -87,13 +108,41 @@
--hash=sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6 \
--hash=sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c \
# via boto3, botocore
-ntlm-auth==1.2.0 \
- --hash=sha256:7bc02a3fbdfee7275d3dc20fce8028ed8eb6d32364637f28be9e9ae9160c6d5c \
- --hash=sha256:9b13eaf88f16a831637d75236a93d60c0049536715aafbf8190ba58a590b023e \
+ntlm-auth==1.3.0 \
+ --hash=sha256:bb2fd03c665f0f62c5f65695b62dcdb07fb7a45df6ebc86c770be2054d6902dd \
+ --hash=sha256:ce5b4483ed761f341a538a426a71a52e5a9cf5fd834ebef1d2090f9eef14b3f8 \
# via pypsrp
+paramiko==2.4.2 \
+ --hash=sha256:3c16b2bfb4c0d810b24c40155dbfd113c0521e7e6ee593d704e84b4c658a1f3b \
+ --hash=sha256:a8975a7df3560c9f1e2b43dc54ebd40fd00a7017392ca5445ce7df409f900fcb
+pyasn1==0.4.5 \
+ --hash=sha256:da2420fe13a9452d8ae97a0e478adde1dee153b11ba832a95b223a2ba01c10f7 \
+ --hash=sha256:da6b43a8c9ae93bc80e2739efb38cc776ba74a886e3e9318d65fe81a8b8a2c6e \
+ # via paramiko
pycparser==2.19 \
--hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3 \
# via cffi
+pynacl==1.3.0 \
+ --hash=sha256:05c26f93964373fc0abe332676cb6735f0ecad27711035b9472751faa8521255 \
+ --hash=sha256:0c6100edd16fefd1557da078c7a31e7b7d7a52ce39fdca2bec29d4f7b6e7600c \
+ --hash=sha256:0d0a8171a68edf51add1e73d2159c4bc19fc0718e79dec51166e940856c2f28e \
+ --hash=sha256:1c780712b206317a746ace34c209b8c29dbfd841dfbc02aa27f2084dd3db77ae \
+ --hash=sha256:2424c8b9f41aa65bbdbd7a64e73a7450ebb4aa9ddedc6a081e7afcc4c97f7621 \
+ --hash=sha256:2d23c04e8d709444220557ae48ed01f3f1086439f12dbf11976e849a4926db56 \
+ --hash=sha256:30f36a9c70450c7878053fa1344aca0145fd47d845270b43a7ee9192a051bf39 \
+ --hash=sha256:37aa336a317209f1bb099ad177fef0da45be36a2aa664507c5d72015f956c310 \
+ --hash=sha256:4943decfc5b905748f0756fdd99d4f9498d7064815c4cf3643820c9028b711d1 \
+ --hash=sha256:57ef38a65056e7800859e5ba9e6091053cd06e1038983016effaffe0efcd594a \
+ --hash=sha256:5bd61e9b44c543016ce1f6aef48606280e45f892a928ca7068fba30021e9b786 \
+ --hash=sha256:6482d3017a0c0327a49dddc8bd1074cc730d45db2ccb09c3bac1f8f32d1eb61b \
+ --hash=sha256:7d3ce02c0784b7cbcc771a2da6ea51f87e8716004512493a2b69016326301c3b \
+ --hash=sha256:a14e499c0f5955dcc3991f785f3f8e2130ed504fa3a7f44009ff458ad6bdd17f \
+ --hash=sha256:a39f54ccbcd2757d1d63b0ec00a00980c0b382c62865b61a505163943624ab20 \
+ --hash=sha256:aabb0c5232910a20eec8563503c153a8e78bbf5459490c49ab31f6adf3f3a415 \
+ --hash=sha256:bd4ecb473a96ad0f90c20acba4f0bf0df91a4e03a1f4dd6a4bdc9ca75aa3a715 \
+ --hash=sha256:e2da3c13307eac601f3de04887624939aca8ee3c9488a0bb0eca4fb9401fc6b1 \
+ --hash=sha256:f67814c38162f4deb31f68d590771a29d5ae3b1bd64b75cf232308e5c74777e0 \
+ # via paramiko
pypsrp==0.3.1 \
--hash=sha256:309853380fe086090a03cc6662a778ee69b1cae355ae4a932859034fd76e9d0b \
--hash=sha256:90f946254f547dc3493cea8493c819ab87e152a755797c93aa2668678ba8ae85
@@ -112,8 +161,8 @@
six==1.12.0 \
--hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
--hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
- # via cryptography, pypsrp, python-dateutil
-urllib3==1.24.1 \
- --hash=sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39 \
- --hash=sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22 \
+ # via bcrypt, cryptography, pynacl, pypsrp, python-dateutil
+urllib3==1.24.2 \
+ --hash=sha256:4c291ca23bbb55c76518905869ef34bdd5f0e46af7afe6861e8375643ffee1a0 \
+ --hash=sha256:9a247273df709c4fedb38c711e44292304f73f39ab01beda9f6b9fc375669ac3 \
# via botocore, requests
--- a/contrib/automation/requirements.txt.in Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/automation/requirements.txt.in Mon Jul 22 14:00:33 2019 -0400
@@ -1,2 +1,3 @@
boto3
+paramiko
pypsrp
--- a/contrib/byteify-strings.py Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/byteify-strings.py Mon Jul 22 14:00:33 2019 -0400
@@ -7,7 +7,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
import argparse
import contextlib
@@ -227,4 +227,7 @@
process(fin, fout, opts)
if __name__ == '__main__':
+ if sys.version_info.major < 3:
+ print('This script must be run under Python 3.')
+ sys.exit(3)
main()
--- a/contrib/catapipe.py Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/catapipe.py Mon Jul 22 14:00:33 2019 -0400
@@ -44,6 +44,7 @@
_TYPEMAP = {
'START': 'B',
'END': 'E',
+ 'COUNTER': 'C',
}
_threadmap = {}
@@ -78,6 +79,11 @@
verb, session, label = ev.split(' ', 2)
if session not in _threadmap:
_threadmap[session] = len(_threadmap)
+ if verb == 'COUNTER':
+ amount, label = label.split(' ', 1)
+ payload_args = {'value': int(amount)}
+ else:
+ payload_args = {}
pid = _threadmap[session]
ts_micros = (now - start) * 1000000
out.write(json.dumps(
@@ -88,7 +94,7 @@
"ts": ts_micros,
"pid": pid,
"tid": 1,
- "args": {}
+ "args": payload_args,
}))
out.write(',\n')
finally:
--- a/contrib/import-checker.py Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/import-checker.py Mon Jul 22 14:00:33 2019 -0400
@@ -649,15 +649,15 @@
... print("%s %s %d" % (_forcestr(m), _forcestr(f), l))
... print(repr(_forcestr(s)))
>>> lines = [
- ... b'comment',
- ... b' >>> from __future__ import print_function',
- ... b" >>> ' multiline",
- ... b" ... string'",
- ... b' ',
- ... b'comment',
- ... b' $ cat > foo.py <<EOF',
- ... b' > from __future__ import print_function',
- ... b' > EOF',
+ ... 'comment',
+ ... ' >>> from __future__ import print_function',
+ ... " >>> ' multiline",
+ ... " ... string'",
+ ... ' ',
+ ... 'comment',
+ ... ' $ cat > foo.py <<EOF',
+ ... ' > from __future__ import print_function',
+ ... ' > EOF',
... ]
>>> test(b"example.t", lines)
example[2] doctest.py 1
@@ -694,7 +694,7 @@
yield src.read(), modname, f, 0
py = True
if py or f.endswith('.t'):
- with open(f, 'rb') as src:
+ with open(f, 'r') as src:
for script, modname, t, line in embedded(f, modname, src):
yield script, modname.encode('utf8'), t, line
--- a/contrib/packaging/inno/readme.rst Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/packaging/inno/readme.rst Mon Jul 22 14:00:33 2019 -0400
@@ -32,7 +32,7 @@
``cd c:\src\hg``.
Next, invoke ``build.py`` to produce an Inno installer. You will
-need to supply the path to the Python interpreter to use.:
+need to supply the path to the Python interpreter to use.::
$ python3.exe contrib\packaging\inno\build.py \
--python c:\python27\python.exe
--- a/contrib/packaging/wix/help.wxs Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/packaging/wix/help.wxs Mon Jul 22 14:00:33 2019 -0400
@@ -49,6 +49,7 @@
<File Id="internals.config.txt" Name="config.txt" />
<File Id="internals.extensions.txt" Name="extensions.txt" />
<File Id="internals.linelog.txt" Name="linelog.txt" />
+ <File Id="internals.mergestate.txt" Name="mergestate.txt" />
<File Id="internals.requirements.txt" Name="requirements.txt" />
<File Id="internals.revlogs.txt" Name="revlogs.txt" />
<File Id="internals.wireprotocol.txt" Name="wireprotocol.txt" />
--- a/contrib/perf.py Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/perf.py Mon Jul 22 14:00:33 2019 -0400
@@ -15,6 +15,13 @@
``presleep``
number of second to wait before any group of runs (default: 1)
+``pre-run``
+  number of runs to perform before starting measurement.
+
+``profile-benchmark``
+ Enable profiling for the benchmarked section.
+ (The first iteration is benchmarked)
+
``run-limits``
Control the number of runs each benchmark will perform. The option value
should be a list of `<time>-<numberofrun>` pairs. After each run the
@@ -106,6 +113,10 @@
except ImportError:
pass
+try:
+ from mercurial import profiling
+except ImportError:
+ profiling = None
def identity(a):
return a
@@ -240,6 +251,12 @@
configitem(b'perf', b'all-timing',
default=mercurial.configitems.dynamicdefault,
)
+ configitem(b'perf', b'pre-run',
+ default=mercurial.configitems.dynamicdefault,
+ )
+ configitem(b'perf', b'profile-benchmark',
+ default=mercurial.configitems.dynamicdefault,
+ )
configitem(b'perf', b'run-limits',
default=mercurial.configitems.dynamicdefault,
)
@@ -251,6 +268,15 @@
return lambda x: 1
return len
+class noop(object):
+ """dummy context manager"""
+ def __enter__(self):
+ pass
+ def __exit__(self, *args):
+ pass
+
+NOOPCTX = noop()
+
def gettimer(ui, opts=None):
"""return a timer function and formatter: (timer, formatter)
@@ -341,7 +367,14 @@
if not limits:
limits = DEFAULTLIMITS
- t = functools.partial(_timer, fm, displayall=displayall, limits=limits)
+ profiler = None
+ if profiling is not None:
+ if ui.configbool(b"perf", b"profile-benchmark", False):
+ profiler = profiling.profile(ui)
+
+ prerun = getint(ui, b"perf", b"pre-run", 0)
+ t = functools.partial(_timer, fm, displayall=displayall, limits=limits,
+ prerun=prerun, profiler=profiler)
return t, fm
def stub_timer(fm, func, setup=None, title=None):
@@ -368,17 +401,25 @@
)
def _timer(fm, func, setup=None, title=None, displayall=False,
- limits=DEFAULTLIMITS):
+ limits=DEFAULTLIMITS, prerun=0, profiler=None):
gc.collect()
results = []
begin = util.timer()
count = 0
+ if profiler is None:
+ profiler = NOOPCTX
+ for i in range(prerun):
+ if setup is not None:
+ setup()
+ func()
keepgoing = True
while keepgoing:
if setup is not None:
setup()
- with timeone() as item:
- r = func()
+ with profiler:
+ with timeone() as item:
+ r = func()
+ profiler = NOOPCTX
count += 1
results.append(item[0])
cstop = util.timer()
@@ -922,17 +963,39 @@
timer(d)
fm.end()
+def _getmergerevs(repo, opts):
+ """parse command argument to return rev involved in merge
+
+ input: options dictionnary with `rev`, `from` and `bse`
+ output: (localctx, otherctx, basectx)
+ """
+ if opts[b'from']:
+ fromrev = scmutil.revsingle(repo, opts[b'from'])
+ wctx = repo[fromrev]
+ else:
+ wctx = repo[None]
+ # we don't want working dir files to be stat'd in the benchmark, so
+ # prime that cache
+ wctx.dirty()
+ rctx = scmutil.revsingle(repo, opts[b'rev'], opts[b'rev'])
+ if opts[b'base']:
+ fromrev = scmutil.revsingle(repo, opts[b'base'])
+ ancestor = repo[fromrev]
+ else:
+ ancestor = wctx.ancestor(rctx)
+ return (wctx, rctx, ancestor)
+
@command(b'perfmergecalculate',
- [(b'r', b'rev', b'.', b'rev to merge against')] + formatteropts)
-def perfmergecalculate(ui, repo, rev, **opts):
+ [
+ (b'r', b'rev', b'.', b'rev to merge against'),
+ (b'', b'from', b'', b'rev to merge from'),
+ (b'', b'base', b'', b'the revision to use as base'),
+ ] + formatteropts)
+def perfmergecalculate(ui, repo, **opts):
opts = _byteskwargs(opts)
timer, fm = gettimer(ui, opts)
- wctx = repo[None]
- rctx = scmutil.revsingle(repo, rev, rev)
- ancestor = wctx.ancestor(rctx)
- # we don't want working dir files to be stat'd in the benchmark, so prime
- # that cache
- wctx.dirty()
+
+ wctx, rctx, ancestor = _getmergerevs(repo, opts)
def d():
# acceptremote is True because we don't want prompts in the middle of
# our benchmark
@@ -941,6 +1004,24 @@
timer(d)
fm.end()
+@command(b'perfmergecopies',
+ [
+ (b'r', b'rev', b'.', b'rev to merge against'),
+ (b'', b'from', b'', b'rev to merge from'),
+ (b'', b'base', b'', b'the revision to use as base'),
+ ] + formatteropts)
+def perfmergecopies(ui, repo, **opts):
+ """measure runtime of `copies.mergecopies`"""
+ opts = _byteskwargs(opts)
+ timer, fm = gettimer(ui, opts)
+ wctx, rctx, ancestor = _getmergerevs(repo, opts)
+ def d():
+ # acceptremote is True because we don't want prompts in the middle of
+ # our benchmark
+ copies.mergecopies(repo, wctx, rctx, ancestor)
+ timer(d)
+ fm.end()
+
@command(b'perfpathcopies', [], b"REV REV")
def perfpathcopies(ui, repo, rev1, rev2, **opts):
"""benchmark the copy tracing logic"""
@@ -1390,6 +1471,111 @@
timer(format)
fm.end()
+@command(b'perfhelper-mergecopies', formatteropts +
+                 [
+                  (b'r', b'revs', [], b'restrict search to these revisions'),
+                  (b'', b'timing', False, b'provides extra data (costly)'),
+                  ])
+def perfhelpermergecopies(ui, repo, revs=[], **opts):
+    """find statistics about potential parameters for `perfmergecopies`
+
+    This command finds (base, p1, p2) triplets relevant for copytracing
+    benchmarking in the context of a merge. It reports values for some of the
+    parameters that impact merge copy tracing time during merge.
+
+    If `--timing` is set, rename detection is run and the associated timing
+    will be reported. The extra details come at the cost of slower command
+    execution.
+
+    Since rename detection is only run once, other factors might easily
+    affect the precision of the timing. However it should give a good
+    approximation of which revision triplets are very costly.
+    """
+    opts = _byteskwargs(opts)
+    fm = ui.formatter(b'perf', opts)
+    dotiming = opts[b'timing']
+
+    output_template = [
+        ("base", "%(base)12s"),
+        ("p1", "%(p1.node)12s"),
+        ("p2", "%(p2.node)12s"),
+        ("p1.nb-revs", "%(p1.nbrevs)12d"),
+        ("p1.nb-files", "%(p1.nbmissingfiles)12d"),
+        ("p1.renames", "%(p1.renamedfiles)12d"),
+        ("p1.time", "%(p1.time)12.3f"),
+        ("p2.nb-revs", "%(p2.nbrevs)12d"),
+        ("p2.nb-files", "%(p2.nbmissingfiles)12d"),
+        ("p2.renames", "%(p2.renamedfiles)12d"),
+        ("p2.time", "%(p2.time)12.3f"),
+        ("renames", "%(nbrenamedfiles)12d"),
+        ("total.time", "%(time)12.3f"),
+    ]
+    if not dotiming:
+        output_template = [i for i in output_template
+                           if not ('time' in i[0] or 'renames' in i[0])]
+    header_names = [h for (h, v) in output_template]
+    output = ' '.join([v for (h, v) in output_template]) + '\n'
+    header = ' '.join(['%12s'] * len(header_names)) + '\n'
+    fm.plain(header % tuple(header_names))
+
+    if not revs:
+        revs = ['all()']
+    revs = scmutil.revrange(repo, revs)
+
+    roi = repo.revs('merge() and %ld', revs)
+    for r in roi:
+        ctx = repo[r]
+        p1 = ctx.p1()
+        p2 = ctx.p2()
+        bases = repo.changelog._commonancestorsheads(p1.rev(), p2.rev())
+        for b in bases:
+            b = repo[b]
+            p1missing = copies._computeforwardmissing(b, p1)
+            p2missing = copies._computeforwardmissing(b, p2)
+            data = {
+                b'base': b.hex(),
+                b'p1.node': p1.hex(),
+                b'p1.nbrevs': len(repo.revs('%d::%d', b.rev(), p1.rev())),
+                b'p1.nbmissingfiles': len(p1missing),
+                b'p2.node': p2.hex(),
+                b'p2.nbrevs': len(repo.revs('%d::%d', b.rev(), p2.rev())),
+                b'p2.nbmissingfiles': len(p2missing),
+            }
+            if dotiming:
+                begin = util.timer()
+                mergedata = copies.mergecopies(repo, p1, p2, b)
+                end = util.timer()
+                # not very stable timing since we did only one run
+                data['time'] = end - begin
+                # mergedata contains five dicts: "copy", "movewithdir",
+                # "diverge", "renamedelete" and "dirmove".
+                # The first 4 are about renamed files, so let's count them.
+                renames = len(mergedata[0])
+                renames += len(mergedata[1])
+                renames += len(mergedata[2])
+                renames += len(mergedata[3])
+                data['nbrenamedfiles'] = renames
+                begin = util.timer()
+                p1renames = copies.pathcopies(b, p1)
+                end = util.timer()
+                data['p1.time'] = end - begin
+                begin = util.timer()
+                p2renames = copies.pathcopies(b, p2)
+                end = util.timer()
+                data['p2.time'] = end - begin
+                data['p1.renamedfiles'] = len(p1renames)
+                data['p2.renamedfiles'] = len(p2renames)
+            fm.startitem()
+            fm.data(**data)
+            # make node pretty for the human output
+            out = data.copy()
+            out['base'] = fm.hexfunc(b.node())
+            out['p1.node'] = fm.hexfunc(p1.node())
+            out['p2.node'] = fm.hexfunc(p2.node())
+            fm.plain(output % out)
+
+    fm.end()
+
@command(b'perfhelper-pathcopies', formatteropts +
[
(b'r', b'revs', [], b'restrict search to these revisions'),
@@ -1890,7 +2076,7 @@
@command(b'perfrevlogwrite', revlogopts + formatteropts +
[(b's', b'startrev', 1000, b'revision to start writing at'),
(b'', b'stoprev', -1, b'last revision to write'),
- (b'', b'count', 3, b'last revision to write'),
+ (b'', b'count', 3, b'number of passes to perform'),
(b'', b'details', False, b'print timing for every revisions tested'),
(b'', b'source', b'full', b'the kind of data feed in the revlog'),
(b'', b'lazydeltabase', True, b'try the provided delta first'),
@@ -1907,6 +2093,16 @@
(use a delta from the first parent otherwise)
* `parent-smallest`: add from the smallest delta (either p1 or p2)
* `storage`: add from the existing precomputed deltas
+
+ Note: This performance command measures performance in a custom way. As a
+ result some of the global configuration of the 'perf' command does not
+ apply to it:
+
+ * ``pre-run``: disabled
+
+ * ``profile-benchmark``: disabled
+
+    * ``run-limits``: disabled, use --count instead
"""
opts = _byteskwargs(opts)
@@ -2081,6 +2277,10 @@
if orig._inline:
raise error.Abort('not supporting inline revlog (yet)')
+ revlogkwargs = {}
+ k = 'upperboundcomp'
+ if util.safehasattr(orig, k):
+ revlogkwargs[k] = getattr(orig, k)
origindexpath = orig.opener.join(orig.indexfile)
origdatapath = orig.opener.join(orig.datafile)
@@ -2112,7 +2312,7 @@
dest = revlog.revlog(vfs,
indexfile=indexname,
- datafile=dataname)
+ datafile=dataname, **revlogkwargs)
if dest._inline:
raise error.Abort('not supporting inline revlog (yet)')
# make sure internals are initialized
--- a/contrib/python3-whitelist Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/python3-whitelist Mon Jul 22 14:00:33 2019 -0400
@@ -4,6 +4,7 @@
test-absorb-phase.t
test-absorb-rename.t
test-absorb-strip.t
+test-absorb-unfinished.t
test-absorb.t
test-acl.t
test-add.t
@@ -35,6 +36,7 @@
test-bisect3.t
test-blackbox.t
test-bookflow.t
+test-bookmarks-corner-case.t
test-bookmarks-current.t
test-bookmarks-merge.t
test-bookmarks-pushpull.t
@@ -104,6 +106,7 @@
test-contrib-check-code.t
test-contrib-check-commit.t
test-contrib-dumprevlog.t
+test-contrib-emacs.t
test-contrib-perf.t
test-contrib-relnotes.t
test-contrib-testparseutil.t
@@ -126,6 +129,8 @@
test-convert-svn-sink.t
test-convert-tagsbranch-topology.t
test-convert.t
+test-copies-in-changeset.t
+test-copies-unrelated.t
test-copies.t
test-copy-move-merge.t
test-copy.t
@@ -139,6 +144,7 @@
test-debugrename.t
test-default-push.t
test-demandimport.py
+test-devel-warnings.t
test-diff-antipatience.t
test-diff-binary-file.t
test-diff-change.t
@@ -159,6 +165,7 @@
test-dirstate-backup.t
test-dirstate-nonnormalset.t
test-dirstate-race.t
+test-dirstate-race2.t
test-dirstate.t
test-dispatch.py
test-dispatch.t
@@ -230,6 +237,7 @@
test-filelog.py
test-fileset-generated.t
test-fileset.t
+test-fix-metadata.t
test-fix-topology.t
test-fix.t
test-flagprocessor.t
@@ -511,6 +519,7 @@
test-pathconflicts-merge.t
test-pathconflicts-update.t
test-pathencode.py
+test-paths.t
test-pending.t
test-permissions.t
test-phabricator.t
@@ -597,6 +606,7 @@
test-releasenotes-merging.t
test-releasenotes-parsing.t
test-relink.t
+test-remote-hidden.t
test-remotefilelog-bad-configs.t
test-remotefilelog-bgprefetch.t
test-remotefilelog-blame.t
@@ -658,10 +668,12 @@
test-run-tests.py
test-run-tests.t
test-rust-ancestor.py
+test-rust-discovery.py
test-schemes.t
test-serve.t
test-server-view.t
test-setdiscovery.t
+test-share-bookmarks.t
test-share.t
test-shelve.t
test-shelve2.t
--- a/contrib/testparseutil.py Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/testparseutil.py Mon Jul 22 14:00:33 2019 -0400
@@ -38,12 +38,6 @@
if ispy3:
import builtins
- # TODO: .buffer might not exist if std streams were replaced; we'll need
- # a silly wrapper to make a bytes stream backed by a unicode one.
- stdin = sys.stdin.buffer
- stdout = sys.stdout.buffer
- stderr = sys.stderr.buffer
-
def bytestr(s):
# tiny version of pycompat.bytestr
return s.encode('latin1')
@@ -54,12 +48,8 @@
return s.decode(u'latin-1')
def opentext(f):
- return open(f, 'rb')
+ return open(f, 'r')
else:
- stdin = sys.stdin
- stdout = sys.stdout
- stderr = sys.stderr
-
bytestr = str
sysstr = identity
@@ -71,11 +61,11 @@
def writeout(data):
# write "data" in BYTES into stdout
- stdout.write(data)
+ sys.stdout.write(data)
def writeerr(data):
# write "data" in BYTES into stderr
- stderr.write(data)
+ sys.stderr.write(data)
####################
@@ -164,14 +154,14 @@
... self.matchfunc = matchfunc
... def startsat(self, line):
... return self.matchfunc(line)
- >>> ambig1 = ambigmatcher(b'ambiguous #1',
- ... lambda l: l.startswith(b' $ cat '))
- >>> ambig2 = ambigmatcher(b'ambiguous #2',
- ... lambda l: l.endswith(b'<< EOF\\n'))
- >>> lines = [b' $ cat > foo.py << EOF\\n']
+ >>> ambig1 = ambigmatcher('ambiguous #1',
+ ... lambda l: l.startswith(' $ cat '))
+ >>> ambig2 = ambigmatcher('ambiguous #2',
+ ... lambda l: l.endswith('<< EOF\\n'))
+ >>> lines = [' $ cat > foo.py << EOF\\n']
>>> errors = []
>>> matchers = [ambig1, ambig2]
- >>> list(t for t in embedded(b'<dummy>', lines, errors, matchers))
+ >>> list(t for t in embedded('<dummy>', lines, errors, matchers))
[]
>>> b2s(errors)
['<dummy>:1: ambiguous line for "ambiguous #1", "ambiguous #2"']
@@ -181,21 +171,21 @@
ctx = filename = code = startline = None # for pyflakes
for lineno, line in enumerate(lines, 1):
- if not line.endswith(b'\n'):
- line += b'\n' # to normalize EOF line
+ if not line.endswith('\n'):
+ line += '\n' # to normalize EOF line
if matcher: # now, inside embedded code
if matcher.endsat(ctx, line):
codeatend = matcher.codeatend(ctx, line)
if codeatend is not None:
code.append(codeatend)
if not matcher.ignores(ctx):
- yield (filename, startline, lineno, b''.join(code))
+ yield (filename, startline, lineno, ''.join(code))
matcher = None
# DO NOT "continue", because line might start next fragment
elif not matcher.isinside(ctx, line):
# this is an error of basefile
# (if matchers are implemented correctly)
- errors.append(b'%s:%d: unexpected line for "%s"'
+ errors.append('%s:%d: unexpected line for "%s"'
% (basefile, lineno, matcher.desc))
# stop extracting embedded code by current 'matcher',
# because appearance of unexpected line might mean
@@ -218,9 +208,9 @@
if matched:
if len(matched) > 1:
# this is an error of matchers, maybe
- errors.append(b'%s:%d: ambiguous line for %s' %
+ errors.append('%s:%d: ambiguous line for %s' %
(basefile, lineno,
- b', '.join([b'"%s"' % m.desc
+ ', '.join(['"%s"' % m.desc
for m, c in matched])))
# omit extracting embedded code, because choosing
# arbitrary matcher from matched ones might fail to
@@ -239,20 +229,20 @@
if matcher:
# examine whether EOF ends embedded code, because embedded
# code isn't yet ended explicitly
- if matcher.endsat(ctx, b'\n'):
- codeatend = matcher.codeatend(ctx, b'\n')
+ if matcher.endsat(ctx, '\n'):
+ codeatend = matcher.codeatend(ctx, '\n')
if codeatend is not None:
code.append(codeatend)
if not matcher.ignores(ctx):
- yield (filename, startline, lineno + 1, b''.join(code))
+ yield (filename, startline, lineno + 1, ''.join(code))
else:
# this is an error of basefile
# (if matchers are implemented correctly)
- errors.append(b'%s:%d: unexpected end of file for "%s"'
+ errors.append('%s:%d: unexpected end of file for "%s"'
% (basefile, lineno, matcher.desc))
# heredoc limit mark to ignore embedded code at check-code.py or so
-heredocignorelimit = b'NO_CHECK_EOF'
+heredocignorelimit = 'NO_CHECK_EOF'
# the pattern to match against cases below, and to return a limit mark
# string as 'lname' group
@@ -260,47 +250,47 @@
# - << LIMITMARK
# - << "LIMITMARK"
# - << 'LIMITMARK'
-heredoclimitpat = br'\s*<<\s*(?P<lquote>["\']?)(?P<limit>\w+)(?P=lquote)'
+heredoclimitpat = r'\s*<<\s*(?P<lquote>["\']?)(?P<limit>\w+)(?P=lquote)'
class fileheredocmatcher(embeddedmatcher):
"""Detect "cat > FILE << LIMIT" style embedded code
- >>> matcher = fileheredocmatcher(b'heredoc .py file', br'[^<]+\\.py')
- >>> b2s(matcher.startsat(b' $ cat > file.py << EOF\\n'))
+ >>> matcher = fileheredocmatcher('heredoc .py file', r'[^<]+\\.py')
+ >>> b2s(matcher.startsat(' $ cat > file.py << EOF\\n'))
('file.py', ' > EOF\\n')
- >>> b2s(matcher.startsat(b' $ cat >>file.py <<EOF\\n'))
+ >>> b2s(matcher.startsat(' $ cat >>file.py <<EOF\\n'))
('file.py', ' > EOF\\n')
- >>> b2s(matcher.startsat(b' $ cat> \\x27any file.py\\x27<< "EOF"\\n'))
+ >>> b2s(matcher.startsat(' $ cat> \\x27any file.py\\x27<< "EOF"\\n'))
('any file.py', ' > EOF\\n')
- >>> b2s(matcher.startsat(b" $ cat > file.py << 'ANYLIMIT'\\n"))
+ >>> b2s(matcher.startsat(" $ cat > file.py << 'ANYLIMIT'\\n"))
('file.py', ' > ANYLIMIT\\n')
- >>> b2s(matcher.startsat(b' $ cat<<ANYLIMIT>"file.py"\\n'))
+ >>> b2s(matcher.startsat(' $ cat<<ANYLIMIT>"file.py"\\n'))
('file.py', ' > ANYLIMIT\\n')
- >>> start = b' $ cat > file.py << EOF\\n'
+ >>> start = ' $ cat > file.py << EOF\\n'
>>> ctx = matcher.startsat(start)
>>> matcher.codeatstart(ctx, start)
>>> b2s(matcher.filename(ctx))
'file.py'
>>> matcher.ignores(ctx)
False
- >>> inside = b' > foo = 1\\n'
+ >>> inside = ' > foo = 1\\n'
>>> matcher.endsat(ctx, inside)
False
>>> matcher.isinside(ctx, inside)
True
>>> b2s(matcher.codeinside(ctx, inside))
'foo = 1\\n'
- >>> end = b' > EOF\\n'
+ >>> end = ' > EOF\\n'
>>> matcher.endsat(ctx, end)
True
>>> matcher.codeatend(ctx, end)
- >>> matcher.endsat(ctx, b' > EOFEOF\\n')
+ >>> matcher.endsat(ctx, ' > EOFEOF\\n')
False
- >>> ctx = matcher.startsat(b' $ cat > file.py << NO_CHECK_EOF\\n')
+ >>> ctx = matcher.startsat(' $ cat > file.py << NO_CHECK_EOF\\n')
>>> matcher.ignores(ctx)
True
"""
- _prefix = b' > '
+ _prefix = ' > '
def __init__(self, desc, namepat):
super(fileheredocmatcher, self).__init__(desc)
@@ -312,13 +302,13 @@
# - > NAMEPAT
# - > "NAMEPAT"
# - > 'NAMEPAT'
- namepat = (br'\s*>>?\s*(?P<nquote>["\']?)(?P<name>%s)(?P=nquote)'
+ namepat = (r'\s*>>?\s*(?P<nquote>["\']?)(?P<name>%s)(?P=nquote)'
% namepat)
self._fileres = [
# "cat > NAME << LIMIT" case
- re.compile(br' \$ \s*cat' + namepat + heredoclimitpat),
+ re.compile(r' \$ \s*cat' + namepat + heredoclimitpat),
# "cat << LIMIT > NAME" case
- re.compile(br' \$ \s*cat' + heredoclimitpat + namepat),
+ re.compile(r' \$ \s*cat' + heredoclimitpat + namepat),
]
def startsat(self, line):
@@ -327,7 +317,7 @@
matched = filere.match(line)
if matched:
return (matched.group('name'),
- b' > %s\n' % matched.group('limit'))
+ ' > %s\n' % matched.group('limit'))
def endsat(self, ctx, line):
return ctx[1] == line
@@ -336,7 +326,7 @@
return line.startswith(self._prefix)
def ignores(self, ctx):
- return b' > %s\n' % heredocignorelimit == ctx[1]
+ return ' > %s\n' % heredocignorelimit == ctx[1]
def filename(self, ctx):
return ctx[0]
@@ -357,10 +347,10 @@
"""Detect ">>> code" style embedded python code
>>> matcher = pydoctestmatcher()
- >>> startline = b' >>> foo = 1\\n'
+ >>> startline = ' >>> foo = 1\\n'
>>> matcher.startsat(startline)
True
- >>> matcher.startsat(b' ... foo = 1\\n')
+ >>> matcher.startsat(' ... foo = 1\\n')
False
>>> ctx = matcher.startsat(startline)
>>> matcher.filename(ctx)
@@ -368,45 +358,45 @@
False
>>> b2s(matcher.codeatstart(ctx, startline))
'foo = 1\\n'
- >>> inside = b' >>> foo = 1\\n'
+ >>> inside = ' >>> foo = 1\\n'
>>> matcher.endsat(ctx, inside)
False
>>> matcher.isinside(ctx, inside)
True
>>> b2s(matcher.codeinside(ctx, inside))
'foo = 1\\n'
- >>> inside = b' ... foo = 1\\n'
+ >>> inside = ' ... foo = 1\\n'
>>> matcher.endsat(ctx, inside)
False
>>> matcher.isinside(ctx, inside)
True
>>> b2s(matcher.codeinside(ctx, inside))
'foo = 1\\n'
- >>> inside = b' expected output\\n'
+ >>> inside = ' expected output\\n'
>>> matcher.endsat(ctx, inside)
False
>>> matcher.isinside(ctx, inside)
True
>>> b2s(matcher.codeinside(ctx, inside))
'\\n'
- >>> inside = b' \\n'
+ >>> inside = ' \\n'
>>> matcher.endsat(ctx, inside)
False
>>> matcher.isinside(ctx, inside)
True
>>> b2s(matcher.codeinside(ctx, inside))
'\\n'
- >>> end = b' $ foo bar\\n'
+ >>> end = ' $ foo bar\\n'
>>> matcher.endsat(ctx, end)
True
>>> matcher.codeatend(ctx, end)
- >>> end = b'\\n'
+ >>> end = '\\n'
>>> matcher.endsat(ctx, end)
True
>>> matcher.codeatend(ctx, end)
"""
- _prefix = b' >>> '
- _prefixre = re.compile(br' (>>>|\.\.\.) ')
+ _prefix = ' >>> '
+ _prefixre = re.compile(r' (>>>|\.\.\.) ')
# If a line matches against not _prefixre but _outputre, that line
# is "an expected output line" (= not a part of code fragment).
@@ -416,10 +406,10 @@
# run-tests.py. But "directive line inside inline python code"
# should be rejected by Mercurial reviewers. Therefore, this
# regexp does not matche against such directive lines.
- _outputre = re.compile(br' $| [^$]')
+ _outputre = re.compile(r' $| [^$]')
def __init__(self):
- super(pydoctestmatcher, self).__init__(b"doctest style python code")
+ super(pydoctestmatcher, self).__init__("doctest style python code")
def startsat(self, line):
# ctx is "True"
@@ -446,57 +436,57 @@
def codeinside(self, ctx, line):
if self._prefixre.match(line):
return line[len(self._prefix):] # strip prefix ' >>> '/' ... '
- return b'\n' # an expected output line is treated as an empty line
+ return '\n' # an expected output line is treated as an empty line
class pyheredocmatcher(embeddedmatcher):
"""Detect "python << LIMIT" style embedded python code
>>> matcher = pyheredocmatcher()
- >>> b2s(matcher.startsat(b' $ python << EOF\\n'))
+ >>> b2s(matcher.startsat(' $ python << EOF\\n'))
' > EOF\\n'
- >>> b2s(matcher.startsat(b' $ $PYTHON <<EOF\\n'))
+ >>> b2s(matcher.startsat(' $ $PYTHON <<EOF\\n'))
' > EOF\\n'
- >>> b2s(matcher.startsat(b' $ "$PYTHON"<< "EOF"\\n'))
+ >>> b2s(matcher.startsat(' $ "$PYTHON"<< "EOF"\\n'))
' > EOF\\n'
- >>> b2s(matcher.startsat(b" $ $PYTHON << 'ANYLIMIT'\\n"))
+ >>> b2s(matcher.startsat(" $ $PYTHON << 'ANYLIMIT'\\n"))
' > ANYLIMIT\\n'
- >>> matcher.startsat(b' $ "$PYTHON" < EOF\\n')
- >>> start = b' $ python << EOF\\n'
+ >>> matcher.startsat(' $ "$PYTHON" < EOF\\n')
+ >>> start = ' $ python << EOF\\n'
>>> ctx = matcher.startsat(start)
>>> matcher.codeatstart(ctx, start)
>>> matcher.filename(ctx)
>>> matcher.ignores(ctx)
False
- >>> inside = b' > foo = 1\\n'
+ >>> inside = ' > foo = 1\\n'
>>> matcher.endsat(ctx, inside)
False
>>> matcher.isinside(ctx, inside)
True
>>> b2s(matcher.codeinside(ctx, inside))
'foo = 1\\n'
- >>> end = b' > EOF\\n'
+ >>> end = ' > EOF\\n'
>>> matcher.endsat(ctx, end)
True
>>> matcher.codeatend(ctx, end)
- >>> matcher.endsat(ctx, b' > EOFEOF\\n')
+ >>> matcher.endsat(ctx, ' > EOFEOF\\n')
False
- >>> ctx = matcher.startsat(b' $ python << NO_CHECK_EOF\\n')
+ >>> ctx = matcher.startsat(' $ python << NO_CHECK_EOF\\n')
>>> matcher.ignores(ctx)
True
"""
- _prefix = b' > '
+ _prefix = ' > '
- _startre = re.compile(br' \$ (\$PYTHON|"\$PYTHON"|python).*' +
+ _startre = re.compile(r' \$ (\$PYTHON|"\$PYTHON"|python).*' +
heredoclimitpat)
def __init__(self):
- super(pyheredocmatcher, self).__init__(b"heredoc python invocation")
+ super(pyheredocmatcher, self).__init__("heredoc python invocation")
def startsat(self, line):
# ctx is END-LINE-OF-EMBEDDED-CODE
matched = self._startre.match(line)
if matched:
- return b' > %s\n' % matched.group('limit')
+ return ' > %s\n' % matched.group('limit')
def endsat(self, ctx, line):
return ctx == line
@@ -505,7 +495,7 @@
return line.startswith(self._prefix)
def ignores(self, ctx):
- return b' > %s\n' % heredocignorelimit == ctx
+ return ' > %s\n' % heredocignorelimit == ctx
def filename(self, ctx):
return None # no filename
@@ -524,7 +514,7 @@
pyheredocmatcher(),
# use '[^<]+' instead of '\S+', in order to match against
# paths including whitespaces
- fileheredocmatcher(b'heredoc .py file', br'[^<]+\.py'),
+ fileheredocmatcher('heredoc .py file', r'[^<]+\.py'),
]
def pyembedded(basefile, lines, errors):
@@ -536,7 +526,7 @@
_shmatchers = [
# use '[^<]+' instead of '\S+', in order to match against
# paths including whitespaces
- fileheredocmatcher(b'heredoc .sh file', br'[^<]+\.sh'),
+ fileheredocmatcher('heredoc .sh file', r'[^<]+\.sh'),
]
def shembedded(basefile, lines, errors):
@@ -548,8 +538,8 @@
_hgrcmatchers = [
# use '[^<]+' instead of '\S+', in order to match against
# paths including whitespaces
- fileheredocmatcher(b'heredoc hgrc file',
- br'(([^/<]+/)+hgrc|\$HGRCPATH|\${HGRCPATH})'),
+ fileheredocmatcher('heredoc hgrc file',
+ r'(([^/<]+/)+hgrc|\$HGRCPATH|\${HGRCPATH})'),
]
def hgrcembedded(basefile, lines, errors):
@@ -565,14 +555,14 @@
errors = []
for name, starts, ends, code in embeddedfunc(basefile, lines, errors):
if not name:
- name = b'<anonymous>'
- writeout(b"%s:%d: %s starts\n" % (basefile, starts, name))
+ name = '<anonymous>'
+ writeout("%s:%d: %s starts\n" % (basefile, starts, name))
if opts.verbose and code:
- writeout(b" |%s\n" %
- b"\n |".join(l for l in code.splitlines()))
- writeout(b"%s:%d: %s ends\n" % (basefile, ends, name))
+ writeout(" |%s\n" %
+ "\n |".join(l for l in code.splitlines()))
+ writeout("%s:%d: %s ends\n" % (basefile, ends, name))
for e in errors:
- writeerr(b"%s\n" % e)
+ writeerr("%s\n" % e)
return len(errors)
def applyembedded(args, embeddedfunc, opts):
@@ -580,11 +570,11 @@
if args:
for f in args:
with opentext(f) as fp:
- if showembedded(bytestr(f), fp, embeddedfunc, opts):
+ if showembedded(f, fp, embeddedfunc, opts):
ret = 1
else:
- lines = [l for l in stdin.readlines()]
- if showembedded(b'<stdin>', lines, embeddedfunc, opts):
+ lines = [l for l in sys.stdin.readlines()]
+ if showembedded('<stdin>', lines, embeddedfunc, opts):
ret = 1
return ret
--- a/contrib/win32/mercurial.ini Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/win32/mercurial.ini Mon Jul 22 14:00:33 2019 -0400
@@ -64,7 +64,6 @@
;relink =
;schemes =
;share =
-;shelve =
;transplant =
;win32mbcs =
;zeroconf =
--- a/contrib/zsh_completion Tue Jul 09 10:07:35 2019 -0400
+++ b/contrib/zsh_completion Mon Jul 22 14:00:33 2019 -0400
@@ -1,4 +1,4 @@
-#compdef hg
+#compdef hg chg
# Zsh completion script for mercurial. Rename this file to _hg and copy
# it into your zsh function path (/usr/share/zsh/site-functions for
--- a/doc/gendoc.py Tue Jul 09 10:07:35 2019 -0400
+++ b/doc/gendoc.py Mon Jul 22 14:00:33 2019 -0400
@@ -120,7 +120,7 @@
# print cmds
ui.write(minirst.section(_(b"Commands")))
- commandprinter(ui, table, minirst.subsection)
+ commandprinter(ui, table, minirst.subsection, minirst.subsubsection)
# print help topics
# The config help topic is included in the hgrc.5 man page.
@@ -143,7 +143,8 @@
cmdtable = getattr(mod, 'cmdtable', None)
if cmdtable:
ui.write(minirst.subsubsection(_(b'Commands')))
- commandprinter(ui, cmdtable, minirst.subsubsubsection)
+ commandprinter(ui, cmdtable, minirst.subsubsubsection,
+ minirst.subsubsubsubsection)
def showtopic(ui, topic):
extrahelptable = [
@@ -177,7 +178,27 @@
ui.write(doc)
ui.write(b"\n")
-def commandprinter(ui, cmdtable, sectionfunc):
+def commandprinter(ui, cmdtable, sectionfunc, subsectionfunc):
+ """Render restructuredtext describing a list of commands and their
+ documentations, grouped by command category.
+
+ Args:
+ ui: UI object to write the output to
+ cmdtable: a dict that maps a string of the command name plus its aliases
+ (separated with pipes) to a 3-tuple of (the command's function, a list
+ of its option descriptions, and a string summarizing available
+ options). Example, with aliases added for demonstration purposes:
+
+ 'phase|alias1|alias2': (
+ <function phase at 0x7f0816b05e60>,
+ [ ('p', 'public', False, 'set changeset phase to public'),
+ ...,
+ ('r', 'rev', [], 'target revision', 'REV')],
+ '[-p|-d|-s] [-f] [-r] [REV...]'
+ )
+ sectionfunc: minirst function to format command category headers
+ subsectionfunc: minirst function to format command headers
+ """
h = {}
for c, attr in cmdtable.items():
f = c.split(b"|")[0]
@@ -185,45 +206,76 @@
h[f] = c
cmds = h.keys()
- for f in sorted(cmds):
- if f.startswith(b"debug"):
+ def helpcategory(cmd):
+ """Given a canonical command name from `cmds` (above), retrieve its
+ help category. If helpcategory is None, default to CATEGORY_NONE.
+ """
+ fullname = h[cmd]
+ details = cmdtable[fullname]
+ helpcategory = details[0].helpcategory
+ return helpcategory or help.registrar.command.CATEGORY_NONE
+
+ cmdsbycategory = {category: [] for category in help.CATEGORY_ORDER}
+ for cmd in cmds:
+ # If a command category wasn't registered, the command won't get
+ # rendered below, so we raise an AssertionError.
+ if helpcategory(cmd) not in cmdsbycategory:
+ raise AssertionError(
+ "The following command did not register its (category) in "
+ "help.CATEGORY_ORDER: %s (%s)" % (cmd, helpcategory(cmd)))
+ cmdsbycategory[helpcategory(cmd)].append(cmd)
+
+ # Print the help for each command. We present the commands grouped by
+ # category, and we use help.CATEGORY_ORDER as a guide for a helpful order
+ # in which to present the categories.
+ for category in help.CATEGORY_ORDER:
+ categorycmds = cmdsbycategory[category]
+ if not categorycmds:
+ # Skip empty categories
continue
- d = get_cmd(h[f], cmdtable)
- ui.write(sectionfunc(d[b'cmd']))
- # short description
- ui.write(d[b'desc'][0])
- # synopsis
- ui.write(b"::\n\n")
- synopsislines = d[b'synopsis'].splitlines()
- for line in synopsislines:
- # some commands (such as rebase) have a multi-line
+ # Print a section header for the category.
+ # For now, the category header is at the same level as the headers for
+ # the commands in the category; this is fixed in the next commit.
+ ui.write(sectionfunc(help.CATEGORY_NAMES[category]))
+ # Print each command in the category
+ for f in sorted(categorycmds):
+ if f.startswith(b"debug"):
+ continue
+ d = get_cmd(h[f], cmdtable)
+ ui.write(subsectionfunc(d[b'cmd']))
+ # short description
+ ui.write(d[b'desc'][0])
# synopsis
- ui.write(b" %s\n" % line)
- ui.write(b'\n')
- # description
- ui.write(b"%s\n\n" % d[b'desc'][1])
- # options
- opt_output = list(d[b'opts'])
- if opt_output:
- opts_len = max([len(line[0]) for line in opt_output])
- ui.write(_(b"Options:\n\n"))
- multioccur = False
- for optstr, desc in opt_output:
- if desc:
- s = b"%-*s %s" % (opts_len, optstr, desc)
- else:
- s = optstr
- ui.write(b"%s\n" % s)
- if optstr.endswith(b"[+]>"):
- multioccur = True
- if multioccur:
- ui.write(_(b"\n[+] marked option can be specified"
- b" multiple times\n"))
- ui.write(b"\n")
- # aliases
- if d[b'aliases']:
- ui.write(_(b" aliases: %s\n\n") % b" ".join(d[b'aliases']))
-
+ ui.write(b"::\n\n")
+ synopsislines = d[b'synopsis'].splitlines()
+ for line in synopsislines:
+ # some commands (such as rebase) have a multi-line
+ # synopsis
+ ui.write(b" %s\n" % line)
+ ui.write(b'\n')
+ # description
+ ui.write(b"%s\n\n" % d[b'desc'][1])
+ # options
+ opt_output = list(d[b'opts'])
+ if opt_output:
+ opts_len = max([len(line[0]) for line in opt_output])
+ ui.write(_(b"Options:\n\n"))
+ multioccur = False
+ for optstr, desc in opt_output:
+ if desc:
+ s = b"%-*s %s" % (opts_len, optstr, desc)
+ else:
+ s = optstr
+ ui.write(b"%s\n" % s)
+ if optstr.endswith(b"[+]>"):
+ multioccur = True
+ if multioccur:
+ ui.write(_(b"\n[+] marked option can be specified"
+ b" multiple times\n"))
+ ui.write(b"\n")
+ # aliases
+ if d[b'aliases']:
+ ui.write(_(b" aliases: %s\n\n") % b" ".join(d[b'aliases']))
def allextensionnames():
return set(extensions.enabled().keys()) | set(extensions.disabled().keys())
--- a/hgdemandimport/demandimportpy3.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgdemandimport/demandimportpy3.py Mon Jul 22 14:00:33 2019 -0400
@@ -32,6 +32,8 @@
import importlib.util
import sys
+from . import tracing
+
_deactivated = False
class _lazyloaderex(importlib.util.LazyLoader):
@@ -40,10 +42,11 @@
"""
def exec_module(self, module):
"""Make the module load lazily."""
- if _deactivated or module.__name__ in ignores:
- self.loader.exec_module(module)
- else:
- super().exec_module(module)
+ with tracing.log('demandimport %s', module):
+ if _deactivated or module.__name__ in ignores:
+ self.loader.exec_module(module)
+ else:
+ super().exec_module(module)
# This is 3.6+ because with Python 3.5 it isn't possible to lazily load
# extensions. See the discussion in https://bugs.python.org/issue26186 for more.
--- a/hgdemandimport/tracing.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgdemandimport/tracing.py Mon Jul 22 14:00:33 2019 -0400
@@ -13,19 +13,23 @@
_pipe = None
_checked = False
-@contextlib.contextmanager
-def log(whencefmt, *whenceargs):
+def _isactive():
global _pipe, _session, _checked
if _pipe is None:
if _checked:
- yield
- return
+ return False
_checked = True
if 'HGCATAPULTSERVERPIPE' not in os.environ:
- yield
- return
+ return False
_pipe = open(os.environ['HGCATAPULTSERVERPIPE'], 'w', 1)
_session = os.environ.get('HGCATAPULTSESSION', 'none')
+ return True
+
+@contextlib.contextmanager
+def log(whencefmt, *whenceargs):
+ if not _isactive():
+ yield
+ return
whence = whencefmt % whenceargs
try:
# Both writes to the pipe are wrapped in try/except to ignore
@@ -42,3 +46,13 @@
_pipe.write('END %s %s\n' % (_session, whence))
except IOError:
pass
+
+def counter(label, amount, *labelargs):
+ if not _isactive():
+ return
+ l = label % labelargs
+ # See above in log() for why this is in a try/except.
+ try:
+ _pipe.write('COUNTER %s %d %s\n' % (_session, amount, l))
+ except IOError:
+ pass
--- a/hgext/absorb.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/absorb.py Mon Jul 22 14:00:33 2019 -0400
@@ -871,7 +871,7 @@
patchlines = mdiff.splitnewlines(buf.getvalue())
# hunk.prettystr() will update hunk.removed
a2 = a1 + hunk.removed
- blines = [l[1:] for l in patchlines[1:] if l[0] != '-']
+ blines = [l[1:] for l in patchlines[1:] if not l.startswith('-')]
return path, (a1, a2, blines)
def overlaydiffcontext(ctx, chunks):
@@ -914,7 +914,10 @@
"""
if stack is None:
limit = ui.configint('absorb', 'max-stack-size')
- stack = getdraftstack(repo['.'], limit)
+ headctx = repo['.']
+ if len(headctx.parents()) > 1:
+ raise error.Abort(_('cannot absorb into a merge'))
+ stack = getdraftstack(headctx, limit)
if limit and len(stack) >= limit:
ui.warn(_('absorb: only the recent %d changesets will '
'be analysed\n')
@@ -932,7 +935,7 @@
if opts.get('interactive'):
diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher)
origchunks = patch.parsepatch(diff)
- chunks = cmdutil.recordfilter(ui, origchunks)[0]
+ chunks = cmdutil.recordfilter(ui, origchunks, matcher)[0]
targetctx = overlaydiffcontext(stack[-1], chunks)
fm = None
if opts.get('print_changes') or not opts.get('apply_changes'):
--- a/hgext/automv.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/automv.py Mon Jul 22 14:00:33 2019 -0400
@@ -81,10 +81,10 @@
"""
stat = repo.status(match=matcher)
- added = stat[1]
- removed = stat[2]
+ added = stat.added
+ removed = stat.removed
- copy = copies._forwardcopies(repo['.'], repo[None], matcher)
+ copy = copies.pathcopies(repo['.'], repo[None], matcher)
# remove the copy files for which we already have copy info
added = [f for f in added if f not in copy]
--- a/hgext/blackbox.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/blackbox.py Mon Jul 22 14:00:33 2019 -0400
@@ -9,12 +9,14 @@
"""log repository events to a blackbox for debugging
Logs event information to .hg/blackbox.log to help debug and diagnose problems.
-The events that get logged can be configured via the blackbox.track config key.
+The events that get logged can be configured via the blackbox.track and
+blackbox.ignore config keys.
Examples::
[blackbox]
track = *
+ ignore = pythonhook
# dirty is *EXPENSIVE* (slow);
# each log entry indicates `+` if the repository is dirty, like :hg:`id`.
dirty = True
@@ -84,6 +86,9 @@
configitem('blackbox', 'track',
default=lambda: ['*'],
)
+configitem('blackbox', 'ignore',
+ default=lambda: ['chgserver', 'cmdserver', 'extension'],
+)
configitem('blackbox', 'date-format',
default='%Y/%m/%d %H:%M:%S',
)
@@ -94,12 +99,15 @@
def __init__(self, ui, repo):
self._repo = repo
self._trackedevents = set(ui.configlist('blackbox', 'track'))
+ self._ignoredevents = set(ui.configlist('blackbox', 'ignore'))
self._maxfiles = ui.configint('blackbox', 'maxfiles')
self._maxsize = ui.configbytes('blackbox', 'maxsize')
self._inlog = False
def tracked(self, event):
- return b'*' in self._trackedevents or event in self._trackedevents
+ return ((b'*' in self._trackedevents
+ and event not in self._ignoredevents)
+ or event in self._trackedevents)
def log(self, ui, event, msg, opts):
# self._log() -> ctx.dirty() may create new subrepo instance, which
--- a/hgext/convert/__init__.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/convert/__init__.py Mon Jul 22 14:00:33 2019 -0400
@@ -439,6 +439,11 @@
:convert.hg.sourcename: records the given string as a 'convert_source' extra
value on each commit made in the target repository. The default is None.
+ :convert.hg.preserve-hash: only works with Mercurial sources. Make convert
+ avoid performance-improving changes to the list of modified files in
+ commits when such a change would alter the hash of a commit.
+ The default is False.
+
All Destinations
################
--- a/hgext/convert/common.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/convert/common.py Mon Jul 22 14:00:33 2019 -0400
@@ -114,7 +114,7 @@
class commit(object):
def __init__(self, author, date, desc, parents, branch=None, rev=None,
extra=None, sortkey=None, saverev=True, phase=phases.draft,
- optparents=None):
+ optparents=None, ctx=None):
self.author = author or 'unknown'
self.date = date or '0 0'
self.desc = desc
@@ -126,6 +126,7 @@
self.sortkey = sortkey
self.saverev = saverev
self.phase = phase
+ self.ctx = ctx # for hg to hg conversions
class converter_source(object):
"""Conversion source interface"""
--- a/hgext/convert/hg.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/convert/hg.py Mon Jul 22 14:00:33 2019 -0400
@@ -339,7 +339,11 @@
phases.phasenames[commit.phase], 'convert')
with self.repo.transaction("convert") as tr:
- node = nodemod.hex(self.repo.commitctx(ctx))
+ if self.repo.ui.config('convert', 'hg.preserve-hash'):
+ origctx = commit.ctx
+ else:
+ origctx = None
+ node = nodemod.hex(self.repo.commitctx(ctx, origctx=origctx))
# If the node value has changed, but the phase is lower than
# draft, set it back to draft since it hasn't been exposed
@@ -591,7 +595,8 @@
extra=ctx.extra(),
sortkey=ctx.rev(),
saverev=self.saverev,
- phase=ctx.phase())
+ phase=ctx.phase(),
+ ctx=ctx)
def numcommits(self):
return len(self.repo)
--- a/hgext/convert/monotone.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/convert/monotone.py Mon Jul 22 14:00:33 2019 -0400
@@ -284,9 +284,9 @@
# d2 => d3
ignoremove[tofile] = 1
for tofile, fromfile in renamed.items():
- self.ui.debug (_("copying file in renamed directory "
- "from '%s' to '%s'")
- % (fromfile, tofile), '\n')
+ self.ui.debug(
+ "copying file in renamed directory from '%s' to '%s'"
+ % (fromfile, tofile), '\n')
files[tofile] = rev
copies[tofile] = fromfile
for fromfile in renamed.values():
@@ -370,4 +370,3 @@
self.mtnwritefp = None
self.mtnreadfp.close()
self.mtnreadfp = None
-
--- a/hgext/convert/subversion.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/convert/subversion.py Mon Jul 22 14:00:33 2019 -0400
@@ -1333,7 +1333,7 @@
rev = self.commit_re.search(output).group(1)
except AttributeError:
if not files:
- return parents[0] if parents else None
+ return parents[0] if parents else 'None'
self.ui.warn(_('unexpected svn output:\n'))
self.ui.warn(output)
raise error.Abort(_('unable to cope with svn output'))
--- a/hgext/eol.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/eol.py Mon Jul 22 14:00:33 2019 -0400
@@ -400,7 +400,7 @@
if wlock is not None:
wlock.release()
- def commitctx(self, ctx, error=False):
+ def commitctx(self, ctx, error=False, origctx=None):
for f in sorted(ctx.added() + ctx.modified()):
if not self._eolmatch(f):
continue
@@ -416,6 +416,6 @@
if inconsistenteol(data):
raise errormod.Abort(_("inconsistent newline style "
"in %s\n") % f)
- return super(eolrepo, self).commitctx(ctx, error)
+ return super(eolrepo, self).commitctx(ctx, error, origctx)
repo.__class__ = eolrepo
repo._hgcleardirstate()
--- a/hgext/fastannotate/revmap.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/fastannotate/revmap.py Mon Jul 22 14:00:33 2019 -0400
@@ -8,6 +8,7 @@
from __future__ import absolute_import
import bisect
+import io
import os
import struct
@@ -246,7 +247,7 @@
hsh = None
try:
with open(path, 'rb') as f:
- f.seek(-_hshlen, 2)
+ f.seek(-_hshlen, io.SEEK_END)
if f.tell() > len(revmap.HEADER):
hsh = f.read(_hshlen)
except IOError:
--- a/hgext/fix.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/fix.py Mon Jul 22 14:00:33 2019 -0400
@@ -72,12 +72,43 @@
To account for changes made by each tool, the line numbers used for incremental
formatting are recomputed before executing the next tool. So, each tool may see
different values for the arguments added by the :linerange suboption.
+
+Each fixer tool is allowed to return some metadata in addition to the fixed file
+content. The metadata must be placed before the file content on stdout,
+separated from the file content by a zero byte. The metadata is parsed as a JSON
+value (so, it should be UTF-8 encoded and contain no zero bytes). A fixer tool
+is expected to produce this metadata encoding if and only if the :metadata
+suboption is true::
+
+ [fix]
+ tool:command = tool --prepend-json-metadata
+ tool:metadata = true
+
+The metadata values are passed to hooks, which can be used to print summaries or
+perform other post-fixing work. The supported hooks are::
+
+ "postfixfile"
+ Run once for each file in each revision where any fixer tools made changes
+ to the file content. Provides "$HG_REV" and "$HG_PATH" to identify the file,
+ and "$HG_METADATA" with a map of fixer names to metadata values from fixer
+ tools that affected the file. Fixer tools that didn't affect the file have a
+ value of None. Only fixer tools that executed are present in the metadata.
+
+ "postfix"
+ Run once after all files and revisions have been handled. Provides
+ "$HG_REPLACEMENTS" with information about what revisions were created and
+ made obsolete. Provides a boolean "$HG_WDIRWRITTEN" to indicate whether any
+ files in the working copy were updated. Provides a list "$HG_METADATA"
+ mapping fixer tool names to lists of metadata values returned from
+ executions that modified a file. This aggregates the same metadata
+ previously passed to the "postfixfile" hook.
"""
from __future__ import absolute_import
import collections
import itertools
+import json
import os
import re
import subprocess
@@ -117,13 +148,14 @@
configtable = {}
configitem = registrar.configitem(configtable)
-# Register the suboptions allowed for each configured fixer.
+# Register the suboptions allowed for each configured fixer, and default values.
FIXER_ATTRS = {
'command': None,
'linerange': None,
'fileset': None,
'pattern': None,
'priority': 0,
+ 'metadata': False,
}
for key, default in FIXER_ATTRS.items():
@@ -201,10 +233,12 @@
for rev, path in items:
ctx = repo[rev]
olddata = ctx[path].data()
- newdata = fixfile(ui, opts, fixers, ctx, path, basectxs[rev])
+ metadata, newdata = fixfile(ui, opts, fixers, ctx, path,
+ basectxs[rev])
# Don't waste memory/time passing unchanged content back, but
# produce one result per item either way.
- yield (rev, path, newdata if newdata != olddata else None)
+ yield (rev, path, metadata,
+ newdata if newdata != olddata else None)
results = worker.worker(ui, 1.0, getfixes, tuple(), workqueue,
threadsafe=False)
@@ -215,15 +249,25 @@
# the tests deterministic. It might also be considered a feature since
# it makes the results more easily reproducible.
filedata = collections.defaultdict(dict)
+ aggregatemetadata = collections.defaultdict(list)
replacements = {}
wdirwritten = False
commitorder = sorted(revstofix, reverse=True)
with ui.makeprogress(topic=_('fixing'), unit=_('files'),
total=sum(numitems.values())) as progress:
- for rev, path, newdata in results:
+ for rev, path, filerevmetadata, newdata in results:
progress.increment(item=path)
+ for fixername, fixermetadata in filerevmetadata.items():
+ aggregatemetadata[fixername].append(fixermetadata)
if newdata is not None:
filedata[rev][path] = newdata
+ hookargs = {
+ 'rev': rev,
+ 'path': path,
+ 'metadata': filerevmetadata,
+ }
+ repo.hook('postfixfile', throw=False,
+ **pycompat.strkwargs(hookargs))
numitems[rev] -= 1
# Apply the fixes for this and any other revisions that are
# ready and sitting at the front of the queue. Using a loop here
@@ -240,6 +284,12 @@
del filedata[rev]
cleanup(repo, replacements, wdirwritten)
+ hookargs = {
+ 'replacements': replacements,
+ 'wdirwritten': wdirwritten,
+ 'metadata': aggregatemetadata,
+ }
+ repo.hook('postfix', throw=True, **pycompat.strkwargs(hookargs))
def cleanup(repo, replacements, wdirwritten):
"""Calls scmutil.cleanupnodes() with the given replacements.
@@ -491,6 +541,7 @@
A fixer tool's stdout will become the file's new content if and only if it
exits with code zero.
"""
+ metadata = {}
newdata = fixctx[path].data()
for fixername, fixer in fixers.iteritems():
if fixer.affects(opts, fixctx, path):
@@ -506,9 +557,20 @@
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
- newerdata, stderr = proc.communicate(newdata)
+ stdout, stderr = proc.communicate(newdata)
if stderr:
showstderr(ui, fixctx.rev(), fixername, stderr)
+ newerdata = stdout
+ if fixer.shouldoutputmetadata():
+ try:
+ metadatajson, newerdata = stdout.split('\0', 1)
+ metadata[fixername] = json.loads(metadatajson)
+ except ValueError:
+ ui.warn(_('ignored invalid output from fixer tool: %s\n') %
+ (fixername,))
+ continue
+ else:
+ metadata[fixername] = None
if proc.returncode == 0:
newdata = newerdata
else:
@@ -519,7 +581,7 @@
ui, _('no fixes will be applied'),
hint=_('use --config fix.failure=continue to apply any '
'successful fixes anyway'))
- return newdata
+ return metadata, newdata
def showstderr(ui, rev, fixername, stderr):
"""Writes the lines of the stderr string as warnings on the ui
@@ -667,6 +729,10 @@
"""Should this fixer run on the file at the given path and context?"""
return scmutil.match(fixctx, [self._pattern], opts)(path)
+ def shouldoutputmetadata(self):
+ """Should the stdout of this fixer start with JSON and a null byte?"""
+ return self._metadata
+
def command(self, ui, path, rangesfn):
"""A shell command to use to invoke this fixer on the given file/lines
--- a/hgext/githelp.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/githelp.py Mon Jul 22 14:00:33 2019 -0400
@@ -192,12 +192,15 @@
def apply(ui, repo, *args, **kwargs):
cmdoptions = [
('p', 'p', int, ''),
+ ('', 'directory', '', ''),
]
args, opts = parseoptions(ui, cmdoptions, args)
cmd = Command('import --no-commit')
if (opts.get('p')):
cmd['-p'] = opts.get('p')
+ if opts.get('directory'):
+ cmd['--prefix'] = opts.get('directory')
cmd.extend(args)
ui.status((bytes(cmd)), "\n")
@@ -681,6 +684,7 @@
def mv(ui, repo, *args, **kwargs):
cmdoptions = [
('f', 'force', None, ''),
+ ('n', 'dry-run', None, ''),
]
args, opts = parseoptions(ui, cmdoptions, args)
@@ -689,6 +693,8 @@
if opts.get('force'):
cmd['-f'] = None
+ if opts.get('dry_run'):
+ cmd['-n'] = None
ui.status((bytes(cmd)), "\n")
@@ -917,6 +923,7 @@
def stash(ui, repo, *args, **kwargs):
cmdoptions = [
+ ('p', 'patch', None, ''),
]
args, opts = parseoptions(ui, cmdoptions, args)
@@ -925,6 +932,17 @@
if action == 'list':
cmd['-l'] = None
+ if opts.get('patch'):
+ cmd['-p'] = None
+ elif action == 'show':
+ if opts.get('patch'):
+ cmd['-p'] = None
+ else:
+ cmd['--stat'] = None
+ if len(args) > 1:
+ cmd.append(args[1])
+ elif action == 'clear':
+ cmd['--cleanup'] = None
elif action == 'drop':
cmd['-d'] = None
if len(args) > 1:
@@ -937,10 +955,9 @@
cmd.append(args[1])
if action == 'apply':
cmd['--keep'] = None
- elif (action == 'branch' or action == 'show' or action == 'clear'
- or action == 'create'):
+ elif action == 'branch' or action == 'create':
ui.status(_("note: Mercurial doesn't have equivalents to the "
- "git stash branch, show, clear, or create actions\n\n"))
+ "git stash branch or create actions\n\n"))
return
else:
if len(args) > 0:
--- a/hgext/gpg.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/gpg.py Mon Jul 22 14:00:33 2019 -0400
@@ -49,6 +49,11 @@
# Custom help category
_HELP_CATEGORY = 'gpg'
+help.CATEGORY_ORDER.insert(
+ help.CATEGORY_ORDER.index(registrar.command.CATEGORY_HELP),
+ _HELP_CATEGORY
+)
+help.CATEGORY_NAMES[_HELP_CATEGORY] = 'Signing changes (GPG)'
class gpg(object):
def __init__(self, path, key=None):
--- a/hgext/histedit.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/histedit.py Mon Jul 22 14:00:33 2019 -0400
@@ -1079,6 +1079,8 @@
def changemode(state, mode):
curmode, _ = state['mode']
state['mode'] = (mode, curmode)
+ if mode == MODE_PATCH:
+ state['modes'][MODE_PATCH]['patchcontents'] = patchcontents(state)
def makeselection(state, pos):
state['selected'] = pos
@@ -1134,7 +1136,7 @@
if mode != MODE_PATCH:
return
mode_state = state['modes'][mode]
- num_lines = len(patchcontents(state))
+ num_lines = len(mode_state['patchcontents'])
page_height = state['page_height']
unit = page_height if unit == 'page' else 1
num_pages = 1 + (num_lines - 1) / page_height
@@ -1227,15 +1229,25 @@
else:
win.addstr(y, x, line)
+def _trunc_head(line, n):
+ if len(line) <= n:
+ return line
+ return '> ' + line[-(n - 2):]
+def _trunc_tail(line, n):
+ if len(line) <= n:
+ return line
+ return line[:n - 2] + ' >'
+
def patchcontents(state):
repo = state['repo']
rule = state['rules'][state['pos']]
- repo.ui.verbose = True
displayer = logcmdutil.changesetdisplayer(repo.ui, repo, {
"patch": True, "template": "status"
}, buffered=True)
- displayer.show(rule.ctx)
- displayer.close()
+ overrides = {('ui', 'verbose'): True}
+ with repo.ui.configoverride(overrides, source='histedit'):
+ displayer.show(rule.ctx)
+ displayer.close()
return displayer.hunk[rule.ctx.rev()].splitlines()
def _chisteditmain(repo, rules, stdscr):
@@ -1283,11 +1295,23 @@
line = "bookmark: {0}".format(' '.join(bms))
win.addstr(3, 1, line[:length])
- line = "files: {0}".format(','.join(ctx.files()))
+ line = "summary: {0}".format(ctx.description().splitlines()[0])
win.addstr(4, 1, line[:length])
- line = "summary: {0}".format(ctx.description().splitlines()[0])
- win.addstr(5, 1, line[:length])
+ line = "files: "
+ win.addstr(5, 1, line)
+ fnx = 1 + len(line)
+ fnmaxx = length - fnx + 1
+ y = 5
+ fnmaxn = maxy - (1 + y) - 1
+ files = ctx.files()
+ for i, line1 in enumerate(files):
+ if len(files) > fnmaxn and i == fnmaxn - 1:
+ win.addstr(y, fnx, _trunc_tail(','.join(files[i:]), fnmaxx))
+ y = y + 1
+ break
+ win.addstr(y, fnx, _trunc_head(line1, fnmaxx))
+ y = y + 1
conflicts = rule.conflicts
if len(conflicts) > 0:
@@ -1296,7 +1320,7 @@
else:
conflictstr = 'no overlap'
- win.addstr(6, 1, conflictstr[:length])
+ win.addstr(y, 1, conflictstr[:length])
win.noutrefresh()
def helplines(mode):
@@ -1372,15 +1396,16 @@
def renderpatch(win, state):
start = state['modes'][MODE_PATCH]['line_offset']
- renderstring(win, state, patchcontents(state)[start:], diffcolors=True)
+ content = state['modes'][MODE_PATCH]['patchcontents']
+ renderstring(win, state, content[start:], diffcolors=True)
def layout(mode):
maxy, maxx = stdscr.getmaxyx()
helplen = len(helplines(mode))
return {
- 'commit': (8, maxx),
+ 'commit': (12, maxx),
'help': (helplen, maxx),
- 'main': (maxy - helplen - 8, maxx),
+ 'main': (maxy - helplen - 12, maxx),
}
def drawvertwin(size, y, x):
@@ -1894,6 +1919,14 @@
finally:
state.clear()
+def hgaborthistedit(ui, repo):
+ state = histeditstate(repo)
+ nobackup = not ui.configbool('rewrite', 'backup-bundle')
+ with repo.wlock() as wlock, repo.lock() as lock:
+ state.wlock = wlock
+ state.lock = lock
+ _aborthistedit(ui, repo, state, nobackup=nobackup)
+
def _edithisteditplan(ui, repo, state, rules):
state.read()
if not rules:
@@ -2288,8 +2321,6 @@
def extsetup(ui):
cmdutil.summaryhooks.add('histedit', summaryhook)
- cmdutil.unfinishedstates.append(
- ['histedit-state', False, True, _('histedit in progress'),
- _("use 'hg histedit --continue' or 'hg histedit --abort'")])
- cmdutil.afterresolvedstates.append(
- ['histedit-state', _('hg histedit --continue')])
+ statemod.addunfinished('histedit', fname='histedit-state', allowcommit=True,
+ continueflag=True, abortfunc=hgaborthistedit)
+
--- a/hgext/keyword.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/keyword.py Mon Jul 22 14:00:33 2019 -0400
@@ -785,8 +785,8 @@
finally:
del self.commitctx
- def kwcommitctx(self, ctx, error=False):
- n = super(kwrepo, self).commitctx(ctx, error)
+ def kwcommitctx(self, ctx, error=False, origctx=None):
+ n = super(kwrepo, self).commitctx(ctx, error, origctx)
# no lock needed, only called from repo.commit() which already locks
if not kwt.postcommit:
restrict = kwt.restrict
--- a/hgext/largefiles/overrides.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/largefiles/overrides.py Mon Jul 22 14:00:33 2019 -0400
@@ -515,7 +515,7 @@
return actions, diverge, renamedelete
@eh.wrapfunction(merge, 'recordupdates')
-def mergerecordupdates(orig, repo, actions, branchmerge):
+def mergerecordupdates(orig, repo, actions, branchmerge, getfiledata):
if 'lfmr' in actions:
lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
for lfile, args, msg in actions['lfmr']:
@@ -526,7 +526,7 @@
lfdirstate.add(lfile)
lfdirstate.write()
- return orig(repo, actions, branchmerge)
+ return orig(repo, actions, branchmerge, getfiledata)
# Override filemerge to prompt the user about how they wish to merge
# largefiles. This will handle identical edits without prompting the user.
@@ -545,7 +545,8 @@
(dhash == ahash or
repo.ui.promptchoice(
_('largefile %s has a merge conflict\nancestor was %s\n'
- 'keep (l)ocal %s or\ntake (o)ther %s?'
+ 'you can keep (l)ocal %s or take (o)ther %s.\n'
+ 'what do you want to do?'
'$$ &Local $$ &Other') %
(lfutil.splitstandin(orig), ahash, dhash, ohash),
0) == 1)):
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/lfs/TODO.rst Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,195 @@
+Prior to removing (EXPERIMENTAL)
+--------------------------------
+
+These things affect UI and/or behavior, and should probably be implemented (or
+ruled out) prior to taking off the experimental shrinkwrap.
+
+#. Finish the `hg convert` story
+
+ * Add an argument to accept a rules file to apply during conversion?
+ Currently `lfs.track` is the only way to affect the conversion.
+ * drop `lfs.track` config settings
+ * splice in `.hglfs` file for normal repo -> lfs conversions?
+
+#. Stop uploading blobs when pushing between local repos
+
+ * Could probably hardlink directly to the other local repo's store
+ * Support inferring `lfs.url` for local push/pull (currently only supports
+ http)
+
+#. Stop uploading blobs on strip/amend/histedit/etc.
+
+ * This seems to be a side effect of doing it for `hg bundle`, which probably
+ makes sense.
+
+#. Handle a server with the extension loaded and a client without the extension
+ more gracefully.
+
+ * `changegroup3` is still experimental, and not enabled by default.
+ * Figure out how to `introduce LFS to the server repo
+ <https://www.mercurial-scm.org/pipermail/mercurial-devel/2018-September/122281.html>`_.
+ See the TODO in test-lfs-serve.t.
+
+#. Remove `lfs.retry` hack in client? This came from FB, but it's not clear why
+ it is/was needed.
+
+#. `hg export` currently writes out the LFS blob. Should it write the pointer
+ instead?
+
+ * `hg diff` is similar, and probably shouldn't see the pointer file
+
+#. `Fix https multiplexing, and re-enable workers
+ <https://www.mercurial-scm.org/pipermail/mercurial-devel/2018-January/109916.html>`_.
+
+#. Show to-be-applied rules with `hg files -r 'wdir()' 'set:lfs()'`
+
+ * `debugignore` can show file + line number, so a dedicated command could be
+ useful too.
+
+#. Filesets, revsets and templates
+
+ * A dedicated revset should be faster than `'file(set:lfs())'`
+ * Attach `{lfsoid}` and `{lfspointer}` to `general keywords
+ <https://www.mercurial-scm.org/pipermail/mercurial-devel/2018-January/110251.html>`_,
+ IFF the file is a blob
+ * Drop existing items that would be redundant with general support
+
+#. Can `grep` avoid downloading most things?
+
+ * Add a command option to skip LFS blobs?
+
+#. Add a flag that's visible in `hg files -v` to indicate external storage?
+
+#. Server side issues
+
+ * Check for local disk space before allowing upload. (I've got a patch for
+ this.)
+ * Make sure the http codes used are appropriate.
+ * `Why is copying the Authorization header into the JSON payload necessary
+ <https://www.mercurial-scm.org/pipermail/mercurial-devel/2018-April/116230.html>`_?
+ * `LFS-Authenticate` header support in client and server(?)
+
+#. Add locks on cache and blob store
+
+ * This is complicated with a global store, and multiple potentially unrelated
+ local repositories that reference the same blob.
+ * Alternately, maybe just handle collisions when trying to create the same
+ blob in the store somehow.
+
+#. Are proper file sizes reported in `debugupgraderepo`?
+
+#. Finish prefetching files
+
+ * `-T {data}` (other than cat?)
+ * `verify`
+ * `grep`
+
+#. Output cleanup
+
+ * Can we print the url when connecting to the blobstore? (A sudden
+ connection refused after pulling commits looks confusing.) Problem is,
+ 'pushing to main url' is printed, and then lfs wants to upload before going
+ back to the main repo transfer, so then *that* could be confusing with
+ extra output. (This is kinda improved with 380f5131ee7b and 9f78d10742af.)
+
+ * Add more progress indicators? Uploading a large repo looks idle for a long
+ time while it scans for blobs in each outgoing revision.
+
+ * Print filenames instead of hashes in error messages
+
+ * subrepo aware paths, where necessary
+
+ * Is existing output at the right status/note/debug level?
+
+#. Can `verify` be done without downloading everything?
+
+ * If we know that we are talking to an hg server, we can leverage the fact
+ that it validates in the Batch API portion, and skip d/l altogether. OTOH,
+ maybe we should download the files unconditionally for forensics. The
+ alternative is to define a custom transfer handler that definitively
+ verifies without transferring, and then cache those results. When verify
+ comes looking, look in the cache instead of actually opening the file and
+ processing it.
+
+ * Yuya has concerns about when blob fetch takes place vs when revlog is
+ verified. Since the visible hash matches the blob content, I don't think
+ there's a way to verify the pointer file that's actually stored in the
+ filelog (other than basic JSON checks). Full verification requires the
+ blob. See
+ https://www.mercurial-scm.org/pipermail/mercurial-devel/2018-April/116133.html
+
+ * Opening a corrupt pointer file aborts. It probably shouldn't for verify.
+
+
+Future ideas/features/polishing
+-------------------------------
+
+These aren't in any particular order, and are things that don't have obvious BC
+concerns.
+
+#. Garbage collection `(issue5790) <https://bz.mercurial-scm.org/show_bug.cgi?id=5790>`_
+
+ * This gets complicated because of the global cache, which may or may not
+ consist of hardlinks to the repo, and may be in use by other repos. (So
+ the gc may be pointless.)
+
+#. `Compress blobs <https://github.com/git-lfs/git-lfs/issues/260>`_
+
+ * 700MB repo becomes 2.5GB with all lfs blobs
+ * What implications are there for filesystem paths that don't indicate
+ compression? (i.e. how to share with global cache and other local repos?)
+ * Probably needs to be stored under `.hg/store/lfs/zstd`, with a repo
+ requirement.
+ * Allow tuneable compression type and settings?
+ * Support compression over the wire if both sides understand the compression?
+ * `debugupgraderepo` to convert?
+ * Probably not worth supporting compressed and uncompressed concurrently
+
+#. Determine things to upload with `readfast()
+ <https://www.mercurial-scm.org/pipermail/mercurial-devel/2018-August/121315.html>`_
+
+ * Significantly faster when pushing an entire large repo to http.
+ * Causes test changes to fileset and templates; may need both this and
+ current methods of lookup.
+
+#. Is a command to download everything needed? This would allow copying the
+ whole to a portable drive. Currently this can be effected by running
+ `hg verify`.
+
+#. Stop reading in entire file into one buffer when passing through filelog
+ interface
+
+ * `Requires major replumbing to core
+ <https://www.mercurial-scm.org/wiki/HandlingLargeFiles>`_
+
+#. Keep corrupt files around in 'store/lfs/incoming' for forensics?
+
+ * Files should be downloaded to 'incoming', and moved to normal location when
+ done.
+
+#. Client side path enhancements
+
+ * Support paths.default:lfs = ... style paths
+ * SSH -> https server inference
+
+ * https://www.mercurial-scm.org/pipermail/mercurial-devel/2018-April/115416.html
+ * https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md#guessing-the-server
+
+#. Server enhancements
+
+ * Add support for transfer quotas?
+ * Download should be able to send the file in chunks, without reading the
+ whole thing into memory
+ (https://www.mercurial-scm.org/pipermail/mercurial-devel/2018-March/114584.html)
+ * Support for resuming transfers
+
+#. Handle 3rd party server storage.
+
+ * Teach client to handle lfs `verify` action. This is needed after the
+ server instructs the client to upload the file to another server, in order
+ to tell the server that the upload completed.
+ * Teach the server to send redirects if configured, and process `verify`
+ requests.
+
+#. `Is any hg-git work needed
+ <https://groups.google.com/d/msg/hg-git/XYNQuudteeM/ivt8gXoZAAAJ>`_?
--- a/hgext/lfs/__init__.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/lfs/__init__.py Mon Jul 22 14:00:33 2019 -0400
@@ -227,9 +227,9 @@
class lfsrepo(repo.__class__):
@localrepo.unfilteredmethod
- def commitctx(self, ctx, error=False):
+ def commitctx(self, ctx, error=False, origctx=None):
repo.svfs.options['lfstrack'] = _trackedmatcher(self)
- return super(lfsrepo, self).commitctx(ctx, error)
+ return super(lfsrepo, self).commitctx(ctx, error, origctx=origctx)
repo.__class__ = lfsrepo
--- a/hgext/mq.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/mq.py Mon Jul 22 14:00:33 2019 -0400
@@ -144,9 +144,21 @@
stripext = extensions.load(dummyui(), 'strip', '')
strip = stripext.strip
-checksubstate = stripext.checksubstate
-checklocalchanges = stripext.checklocalchanges
-
+
+def checksubstate(repo, baserev=None):
+ '''return list of subrepos at a different revision than substate.
+ Abort if any subrepos have uncommitted changes.'''
+ inclsubs = []
+ wctx = repo[None]
+ if baserev:
+ bctx = repo[baserev]
+ else:
+ bctx = wctx.p1()
+ for s in sorted(wctx.substate):
+ wctx.sub(s).bailifchanged(True)
+ if s not in bctx.substate or bctx.sub(s).dirty():
+ inclsubs.append(s)
+ return inclsubs
# Patch names looks like unix-file names.
# They must be joinable with queue directory and result in the patch path.
@@ -1149,7 +1161,19 @@
# plain versions for i18n tool to detect them
_("local changes found, qrefresh first")
_("local changed subrepos found, qrefresh first")
- return checklocalchanges(repo, force, excsuffix)
+
+ s = repo.status()
+ if not force:
+ cmdutil.checkunfinished(repo)
+ if s.modified or s.added or s.removed or s.deleted:
+ _("local changes found") # i18n tool detection
+ raise error.Abort(_("local changes found" + excsuffix))
+ if checksubstate(repo):
+ _("local changed subrepos found") # i18n tool detection
+ raise error.Abort(_("local changed subrepos found" + excsuffix))
+ else:
+ cmdutil.checkunfinished(repo, skipmerge=True)
+ return s
_reserved = ('series', 'status', 'guards', '.', '..')
def checkreservedname(self, name):
--- a/hgext/narrow/narrowbundle2.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/narrow/narrowbundle2.py Mon Jul 22 14:00:33 2019 -0400
@@ -51,21 +51,25 @@
assert repo.ui.configbool('experimental', 'narrowservebrokenellipses')
cgversions = b2caps.get('changegroup')
- if cgversions: # 3.1 and 3.2 ship with an empty value
- cgversions = [v for v in cgversions
- if v in changegroup.supportedoutgoingversions(repo)]
- if not cgversions:
- raise ValueError(_('no common changegroup version'))
- version = max(cgversions)
- else:
- raise ValueError(_("server does not advertise changegroup version,"
- " can't negotiate support for ellipsis nodes"))
+ cgversions = [v for v in cgversions
+ if v in changegroup.supportedoutgoingversions(repo)]
+ if not cgversions:
+ raise ValueError(_('no common changegroup version'))
+ version = max(cgversions)
- include = sorted(filter(bool, kwargs.get(r'includepats', [])))
- exclude = sorted(filter(bool, kwargs.get(r'excludepats', [])))
- newmatch = narrowspec.match(repo.root, include=include, exclude=exclude)
+ oldinclude = sorted(filter(bool, kwargs.get(r'oldincludepats', [])))
+ oldexclude = sorted(filter(bool, kwargs.get(r'oldexcludepats', [])))
+ newinclude = sorted(filter(bool, kwargs.get(r'includepats', [])))
+ newexclude = sorted(filter(bool, kwargs.get(r'excludepats', [])))
+ known = {bin(n) for n in kwargs.get(r'known', [])}
+ generateellipsesbundle2(bundler, repo, oldinclude, oldexclude, newinclude,
+ newexclude, version, common, heads, known,
+ kwargs.get(r'depth', None))
- depth = kwargs.get(r'depth', None)
+def generateellipsesbundle2(bundler, repo, oldinclude, oldexclude, newinclude,
+ newexclude, version, common, heads, known, depth):
+ newmatch = narrowspec.match(repo.root, include=newinclude,
+ exclude=newexclude)
if depth is not None:
depth = int(depth)
if depth < 1:
@@ -73,10 +77,7 @@
heads = set(heads or repo.heads())
common = set(common or [nullid])
- oldinclude = sorted(filter(bool, kwargs.get(r'oldincludepats', [])))
- oldexclude = sorted(filter(bool, kwargs.get(r'oldexcludepats', [])))
- known = {bin(n) for n in kwargs.get(r'known', [])}
- if known and (oldinclude != include or oldexclude != exclude):
+ if known and (oldinclude != newinclude or oldexclude != newexclude):
# Steps:
# 1. Send kill for "$known & ::common"
#
--- a/hgext/narrow/narrowcommands.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/narrow/narrowcommands.py Mon Jul 22 14:00:33 2019 -0400
@@ -146,7 +146,7 @@
kwargs['excludepats'] = exclude
# calculate known nodes only in ellipses cases because in non-ellipses cases
# we have all the nodes
- if wireprototypes.ELLIPSESCAP in pullop.remote.capabilities():
+ if wireprototypes.ELLIPSESCAP1 in pullop.remote.capabilities():
kwargs['known'] = [node.hex(ctx.node()) for ctx in
repo.set('::%ln', pullop.common)
if ctx.node() != node.nullid]
@@ -216,7 +216,7 @@
todelete.append(f)
elif f.startswith('meta/'):
dir = f[5:-13]
- dirs = ['.'] + sorted(util.dirs({dir})) + [dir]
+ dirs = sorted(util.dirs({dir})) + [dir]
include = True
for d in dirs:
visit = newmatch.visitdir(d)
@@ -253,7 +253,14 @@
# then send that information to server whether we want ellipses or not.
# Theoretically a non-ellipses repo should be able to use narrow
# functionality from an ellipses enabled server
- ellipsesremote = wireprototypes.ELLIPSESCAP in remote.capabilities()
+ remotecap = remote.capabilities()
+ ellipsesremote = any(cap in remotecap
+ for cap in wireprototypes.SUPPORTED_ELLIPSESCAP)
+
+ # check whether we are talking to a server which supports old version of
+ # ellipses capabilities
+ isoldellipses = (ellipsesremote and wireprototypes.ELLIPSESCAP1 in
+ remotecap and wireprototypes.ELLIPSESCAP not in remotecap)
def pullbundle2extraprepare_widen(orig, pullop, kwargs):
orig(pullop, kwargs)
@@ -271,19 +278,22 @@
# silence the devel-warning of applying an empty changegroup
overrides = {('devel', 'all-warnings'): False}
+ common = commoninc[0]
with ui.uninterruptible():
- common = commoninc[0]
if ellipsesremote:
ds = repo.dirstate
p1, p2 = ds.p1(), ds.p2()
with ds.parentchange():
ds.setparents(node.nullid, node.nullid)
+ if isoldellipses:
with wrappedextraprepare:
- with repo.ui.configoverride(overrides, 'widen'):
- exchange.pull(repo, remote, heads=common)
- with ds.parentchange():
- ds.setparents(p1, p2)
+ exchange.pull(repo, remote, heads=common)
else:
+ known = []
+ if ellipsesremote:
+ known = [node.hex(ctx.node()) for ctx in
+ repo.set('::%ln', common)
+ if ctx.node() != node.nullid]
with remote.commandexecutor() as e:
bundle = e.callcommand('narrow_widen', {
'oldincludes': oldincludes,
@@ -292,15 +302,20 @@
'newexcludes': newexcludes,
'cgversion': '03',
'commonheads': common,
- 'known': [],
- 'ellipses': False,
+ 'known': known,
+ 'ellipses': ellipsesremote,
}).result()
- with repo.transaction('widening') as tr:
- with repo.ui.configoverride(overrides, 'widen'):
- tgetter = lambda: tr
- bundle2.processbundle(repo, bundle,
- transactiongetter=tgetter)
+ trmanager = exchange.transactionmanager(repo, 'widen', remote.url())
+ with trmanager, repo.ui.configoverride(overrides, 'widen'):
+ op = bundle2.bundleoperation(repo, trmanager.transaction,
+ source='widen')
+ # TODO: we should catch error.Abort here
+ bundle2.processbundle(repo, bundle, op=op)
+
+ if ellipsesremote:
+ with ds.parentchange():
+ ds.setparents(p1, p2)
with repo.transaction('widening'):
repo.setnewnarrowpats()
--- a/hgext/narrow/narrowdirstate.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/narrow/narrowdirstate.py Mon Jul 22 14:00:33 2019 -0400
@@ -16,21 +16,21 @@
"""Add narrow spec dirstate ignore, block changes outside narrow spec."""
def _editfunc(fn):
- def _wrapper(self, *args):
+ def _wrapper(self, *args, **kwargs):
narrowmatch = repo.narrowmatch()
for f in args:
if f is not None and not narrowmatch(f) and f not in self:
raise error.Abort(_("cannot track '%s' - it is outside " +
"the narrow clone") % f)
- return fn(self, *args)
+ return fn(self, *args, **kwargs)
return _wrapper
class narrowdirstate(dirstate.__class__):
# Prevent adding/editing/copying/deleting files that are outside the
# sparse checkout
@_editfunc
- def normal(self, *args):
- return super(narrowdirstate, self).normal(*args)
+ def normal(self, *args, **kwargs):
+ return super(narrowdirstate, self).normal(*args, **kwargs)
@_editfunc
def add(self, *args):
--- a/hgext/narrow/narrowtemplates.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/narrow/narrowtemplates.py Mon Jul 22 14:00:33 2019 -0400
@@ -37,7 +37,7 @@
repo = context.resource(mapping, 'repo')
ctx = context.resource(mapping, 'ctx')
m = repo.narrowmatch()
- if not m.always():
+ if ctx.files() and not m.always():
if not any(m(f) for f in ctx.files()):
return 'outsidenarrow'
return ''
--- a/hgext/narrow/narrowwirepeer.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/narrow/narrowwirepeer.py Mon Jul 22 14:00:33 2019 -0400
@@ -13,12 +13,15 @@
extensions,
hg,
narrowspec,
+ node as nodemod,
pycompat,
wireprototypes,
wireprotov1peer,
wireprotov1server,
)
+from . import narrowbundle2
+
def uisetup():
wireprotov1peer.wirepeer.narrow_widen = peernarrowwiden
@@ -69,21 +72,26 @@
narrowspec.validatepatterns(set(newexcludes))
common = wireprototypes.decodelist(commonheads)
- known = None
- if known:
- known = wireprototypes.decodelist(known)
+ known = wireprototypes.decodelist(known)
+ known = {nodemod.bin(n) for n in known}
if ellipses == '0':
ellipses = False
else:
ellipses = bool(ellipses)
cgversion = cgversion
- newmatch = narrowspec.match(repo.root, include=newincludes,
- exclude=newexcludes)
- oldmatch = narrowspec.match(repo.root, include=oldincludes,
- exclude=oldexcludes)
- bundler = bundle2.widen_bundle(repo, oldmatch, newmatch, common, known,
- cgversion, ellipses)
+ bundler = bundle2.bundle20(repo.ui)
+ if not ellipses:
+ newmatch = narrowspec.match(repo.root, include=newincludes,
+ exclude=newexcludes)
+ oldmatch = narrowspec.match(repo.root, include=oldincludes,
+ exclude=oldexcludes)
+ bundle2.widen_bundle(bundler, repo, oldmatch, newmatch, common,
+ known, cgversion, ellipses)
+ else:
+ narrowbundle2.generateellipsesbundle2(bundler, repo, oldincludes,
+ oldexcludes, newincludes, newexcludes, cgversion, common,
+ list(common), known, None)
except error.Abort as exc:
bundler = bundle2.bundle20(repo.ui)
manargs = [('message', pycompat.bytestr(exc))]
--- a/hgext/phabricator.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/phabricator.py Mon Jul 22 14:00:33 2019 -0400
@@ -65,6 +65,7 @@
scmutil,
smartset,
tags,
+ templatefilters,
templateutil,
url as urlmod,
util,
@@ -124,8 +125,28 @@
)),
]
-def vcrcommand(name, flags, spec, helpcategory=None):
+def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
fullflags = flags + _VCR_FLAGS
+ def hgmatcher(r1, r2):
+ if r1.uri != r2.uri or r1.method != r2.method:
+ return False
+ r1params = r1.body.split(b'&')
+ r2params = r2.body.split(b'&')
+ return set(r1params) == set(r2params)
+
+ def sanitiserequest(request):
+ request.body = re.sub(
+ r'cli-[a-z0-9]+',
+ r'cli-hahayouwish',
+ request.body
+ )
+ return request
+
+ def sanitiseresponse(response):
+ if r'set-cookie' in response[r'headers']:
+ del response[r'headers'][r'set-cookie']
+ return response
+
def decorate(fn):
def inner(*args, **kwargs):
cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
@@ -136,18 +157,22 @@
import vcr.stubs as stubs
vcr = vcrmod.VCR(
serializer=r'json',
+ before_record_request=sanitiserequest,
+ before_record_response=sanitiseresponse,
custom_patches=[
(urlmod, r'httpconnection',
stubs.VCRHTTPConnection),
(urlmod, r'httpsconnection',
stubs.VCRHTTPSConnection),
])
- with vcr.use_cassette(cassette):
+ vcr.register_matcher(r'hgmatcher', hgmatcher)
+ with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
return fn(*args, **kwargs)
return fn(*args, **kwargs)
inner.__name__ = fn.__name__
inner.__doc__ = fn.__doc__
- return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
+ return command(name, fullflags, spec, helpcategory=helpcategory,
+ optionalrepo=optionalrepo)(inner)
return decorate
def urlencodenested(params):
@@ -174,24 +199,24 @@
process(b'', params)
return util.urlreq.urlencode(flatparams)
-def readurltoken(repo):
+def readurltoken(ui):
"""return conduit url, token and make sure they exist
Currently read from [auth] config section. In the future, it might
make sense to read from .arcconfig and .arcrc as well.
"""
- url = repo.ui.config(b'phabricator', b'url')
+ url = ui.config(b'phabricator', b'url')
if not url:
raise error.Abort(_(b'config %s.%s is required')
% (b'phabricator', b'url'))
- res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
+ res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
token = None
if res:
group, auth = res
- repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
+ ui.debug(b"using auth.%s.* for authentication\n" % group)
token = auth.get(b'phabtoken')
@@ -201,15 +226,15 @@
return url, token
-def callconduit(repo, name, params):
+def callconduit(ui, name, params):
"""call Conduit API, params is a dict. return json.loads result, or None"""
- host, token = readurltoken(repo)
+ host, token = readurltoken(ui)
url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
- repo.ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
+ ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
params = params.copy()
params[b'api.token'] = token
data = urlencodenested(params)
- curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
+ curlcmd = ui.config(b'phabricator', b'curlcmd')
if curlcmd:
sin, sout = procutil.popen2(b'%s -d @- %s'
% (curlcmd, procutil.shellquote(url)))
@@ -217,11 +242,11 @@
sin.close()
body = sout.read()
else:
- urlopener = urlmod.opener(repo.ui, authinfo)
+ urlopener = urlmod.opener(ui, authinfo)
request = util.urlreq.request(pycompat.strurl(url), data=data)
with contextlib.closing(urlopener.open(request)) as rsp:
body = rsp.read()
- repo.ui.debug(b'Conduit Response: %s\n' % body)
+ ui.debug(b'Conduit Response: %s\n' % body)
parsed = pycompat.rapply(
lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
else x,
@@ -233,7 +258,7 @@
raise error.Abort(msg)
return parsed[b'result']
-@vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
+@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
"""call Conduit API
@@ -250,7 +275,7 @@
# json.dumps only accepts unicode strings
result = pycompat.rapply(lambda x:
encoding.unifromlocal(x) if isinstance(x, bytes) else x,
- callconduit(repo, name, params)
+ callconduit(ui, name, params)
)
s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
ui.write(b'%s\n' % encoding.unitolocal(s))
@@ -264,7 +289,7 @@
callsign = repo.ui.config(b'phabricator', b'callsign')
if not callsign:
return None
- query = callconduit(repo, b'diffusion.repository.search',
+ query = callconduit(repo.ui, b'diffusion.repository.search',
{b'constraints': {b'callsigns': [callsign]}})
if len(query[b'data']) == 0:
return None
@@ -320,7 +345,7 @@
# Phabricator, and expect precursors overlap with it.
if toconfirm:
drevs = [drev for force, precs, drev in toconfirm.values()]
- alldiffs = callconduit(unfi, b'differential.querydiffs',
+ alldiffs = callconduit(unfi.ui, b'differential.querydiffs',
{b'revisionIDs': drevs})
getnode = lambda d: bin(
getdiffmeta(d).get(b'node', b'')) or None
@@ -370,7 +395,7 @@
params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
if repophid:
params[b'repositoryPHID'] = repophid
- diff = callconduit(repo, b'differential.createrawdiff', params)
+ diff = callconduit(repo.ui, b'differential.createrawdiff', params)
if not diff:
raise error.Abort(_(b'cannot create diff for %s') % ctx)
return diff
@@ -380,35 +405,39 @@
params = {
b'diff_id': diff[b'id'],
b'name': b'hg:meta',
- b'data': json.dumps({
- u'user': encoding.unifromlocal(ctx.user()),
- u'date': u'{:.0f} {}'.format(*ctx.date()),
- u'node': encoding.unifromlocal(ctx.hex()),
- u'parent': encoding.unifromlocal(ctx.p1().hex()),
+ b'data': templatefilters.json({
+ b'user': ctx.user(),
+ b'date': b'%d %d' % ctx.date(),
+ b'branch': ctx.branch(),
+ b'node': ctx.hex(),
+ b'parent': ctx.p1().hex(),
}),
}
- callconduit(ctx.repo(), b'differential.setdiffproperty', params)
+ callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
params = {
b'diff_id': diff[b'id'],
b'name': b'local:commits',
- b'data': json.dumps({
- encoding.unifromlocal(ctx.hex()): {
- u'author': encoding.unifromlocal(stringutil.person(ctx.user())),
- u'authorEmail': encoding.unifromlocal(
- stringutil.email(ctx.user())),
- u'time': u'{:.0f}'.format(ctx.date()[0]),
+ b'data': templatefilters.json({
+ ctx.hex(): {
+ b'author': stringutil.person(ctx.user()),
+ b'authorEmail': stringutil.email(ctx.user()),
+ b'time': int(ctx.date()[0]),
+ b'commit': ctx.hex(),
+ b'parents': [ctx.p1().hex()],
+ b'branch': ctx.branch(),
},
}),
}
- callconduit(ctx.repo(), b'differential.setdiffproperty', params)
+ callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
-def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
- olddiff=None, actions=None):
+def createdifferentialrevision(ctx, revid=None, parentrevphid=None,
+ oldnode=None, olddiff=None, actions=None,
+ comment=None):
"""create or update a Differential Revision
If revid is None, create a new Differential Revision, otherwise update
- revid. If parentrevid is not None, set it as a dependency.
+ revid. If parentrevphid is not None, set it as a dependency.
If oldnode is not None, check if the patch content (without commit message
and metadata) has changed before creating another diff.
@@ -427,6 +456,8 @@
if neednewdiff:
diff = creatediff(ctx)
transactions.append({b'type': b'update', b'value': diff[b'phid']})
+ if comment:
+ transactions.append({b'type': b'comment', b'value': comment})
else:
# Even if we don't need to upload a new diff because the patch content
# does not change. We might still need to update its metadata so
@@ -435,21 +466,17 @@
diff = olddiff
writediffproperties(ctx, diff)
- # Use a temporary summary to set dependency. There might be better ways but
- # I cannot find them for now. But do not do that if we are updating an
- # existing revision (revid is not None) since that introduces visible
- # churns (someone edited "Summary" twice) on the web page.
- if parentrevid and revid is None:
- summary = b'Depends on D%d' % parentrevid
- transactions += [{b'type': b'summary', b'value': summary},
- {b'type': b'summary', b'value': b' '}]
+ # Set the parent Revision every time, so commit re-ordering is picked-up
+ if parentrevphid:
+ transactions.append({b'type': b'parents.set',
+ b'value': [parentrevphid]})
if actions:
transactions += actions
# Parse commit message and update related fields.
desc = ctx.description()
- info = callconduit(repo, b'differential.parsecommitmessage',
+ info = callconduit(repo.ui, b'differential.parsecommitmessage',
{b'corpus': desc})
for k, v in info[b'fields'].items():
if k in [b'title', b'summary', b'testPlan']:
@@ -460,7 +487,7 @@
# Update an existing Differential Revision
params[b'objectIdentifier'] = revid
- revision = callconduit(repo, b'differential.revision.edit', params)
+ revision = callconduit(repo.ui, b'differential.revision.edit', params)
if not revision:
raise error.Abort(_(b'cannot create revision for %s') % ctx)
@@ -470,7 +497,7 @@
"""convert user names to PHIDs"""
names = [name.lower() for name in names]
query = {b'constraints': {b'usernames': names}}
- result = callconduit(repo, b'user.search', query)
+ result = callconduit(repo.ui, b'user.search', query)
# username not found is not an error of the API. So check if we have missed
# some names here.
data = result[b'data']
@@ -485,6 +512,9 @@
[(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
(b'', b'amend', True, _(b'update commit messages')),
(b'', b'reviewer', [], _(b'specify reviewers')),
+ (b'', b'blocker', [], _(b'specify blocking reviewers')),
+ (b'm', b'comment', b'',
+ _(b'add a comment to Revisions with new/updated Diffs')),
(b'', b'confirm', None, _(b'ask for confirmation before sending'))],
_(b'REV [OPTIONS]'),
helpcategory=command.CATEGORY_IMPORT_EXPORT)
@@ -536,16 +566,23 @@
actions = []
reviewers = opts.get(b'reviewer', [])
+ blockers = opts.get(b'blocker', [])
+ phids = []
if reviewers:
- phids = userphids(repo, reviewers)
+ phids.extend(userphids(repo, reviewers))
+ if blockers:
+ phids.extend(map(
+ lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers)
+ ))
+ if phids:
actions.append({b'type': b'reviewers.add', b'value': phids})
drevids = [] # [int]
diffmap = {} # {newnode: diff}
- # Send patches one by one so we know their Differential Revision IDs and
+ # Send patches one by one so we know their Differential Revision PHIDs and
# can provide dependency relationship
- lastrevid = None
+ lastrevphid = None
for rev in revs:
ui.debug(b'sending rev %d\n' % rev)
ctx = repo[rev]
@@ -555,9 +592,11 @@
if oldnode != ctx.node() or opts.get(b'amend'):
# Create or update Differential Revision
revision, diff = createdifferentialrevision(
- ctx, revid, lastrevid, oldnode, olddiff, actions)
+ ctx, revid, lastrevphid, oldnode, olddiff, actions,
+ opts.get(b'comment'))
diffmap[ctx.node()] = diff
newrevid = int(revision[b'object'][b'id'])
+ newrevphid = revision[b'object'][b'phid']
if revid:
action = b'updated'
else:
@@ -571,8 +610,9 @@
tags.tag(repo, tagname, ctx.node(), message=None, user=None,
date=None, local=True)
else:
- # Nothing changed. But still set "newrevid" so the next revision
- # could depend on this one.
+ # Nothing changed. But still set "newrevphid" so the next revision
+ # could depend on this one and "newrevid" for the summary line.
+ newrevphid = querydrev(repo, str(revid))[0][b'phid']
newrevid = revid
action = b'skipped'
@@ -587,12 +627,12 @@
ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
desc))
drevids.append(newrevid)
- lastrevid = newrevid
+ lastrevphid = newrevphid
# Update commit messages and remove tags
if opts.get(b'amend'):
unfi = repo.unfiltered()
- drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
+ drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
wnode = unfi[b'.'].node()
mapping = {} # {oldnode: [newnode]}
@@ -632,10 +672,11 @@
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
- (b'node', b'Node ID'), (b'parent', b'Parent ')])
+ (b'branch', b'Branch'), (b'node', b'Node ID'),
+ (b'parent', b'Parent ')])
def _confirmbeforesend(repo, revs, oldmap):
- url, token = readurltoken(repo)
+ url, token = readurltoken(repo.ui)
ui = repo.ui
for rev in revs:
ctx = repo[rev]
@@ -777,7 +818,7 @@
key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
if key in prefetched:
return prefetched[key]
- drevs = callconduit(repo, b'differential.query', params)
+ drevs = callconduit(repo.ui, b'differential.query', params)
# Fill prefetched with the result
for drev in drevs:
prefetched[drev[b'phid']] = drev
@@ -901,16 +942,31 @@
"""
props = diff.get(b'properties') or {}
meta = props.get(b'hg:meta')
- if not meta and props.get(b'local:commits'):
- commit = sorted(props[b'local:commits'].values())[0]
- meta = {
- b'date': b'%d 0' % commit[b'time'],
- b'node': commit[b'rev'],
- b'user': b'%s <%s>' % (commit[b'author'], commit[b'authorEmail']),
- }
- if len(commit.get(b'parents', ())) >= 1:
- meta[b'parent'] = commit[b'parents'][0]
- return meta or {}
+ if not meta:
+ if props.get(b'local:commits'):
+ commit = sorted(props[b'local:commits'].values())[0]
+ meta = {}
+ if b'author' in commit and b'authorEmail' in commit:
+ meta[b'user'] = b'%s <%s>' % (commit[b'author'],
+ commit[b'authorEmail'])
+ if b'time' in commit:
+ meta[b'date'] = b'%d 0' % int(commit[b'time'])
+ if b'branch' in commit:
+ meta[b'branch'] = commit[b'branch']
+ node = commit.get(b'commit', commit.get(b'rev'))
+ if node:
+ meta[b'node'] = node
+ if len(commit.get(b'parents', ())) >= 1:
+ meta[b'parent'] = commit[b'parents'][0]
+ else:
+ meta = {}
+ if b'date' not in meta and b'dateCreated' in diff:
+ meta[b'date'] = b'%s 0' % diff[b'dateCreated']
+ if b'branch' not in meta and diff.get(b'branch'):
+ meta[b'branch'] = diff[b'branch']
+ if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
+ meta[b'parent'] = diff[b'sourceControlBaseRevision']
+ return meta
def readpatch(repo, drevs, write):
"""generate plain-text patch readable by 'hg import'
@@ -920,14 +976,14 @@
"""
# Prefetch hg:meta property for all diffs
diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
- diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})
+ diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
# Generate patch for each drev
for drev in drevs:
repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
diffid = max(int(v) for v in drev[b'diffs'])
- body = callconduit(repo, b'differential.getrawdiff',
+ body = callconduit(repo.ui, b'differential.getrawdiff',
{b'diffID': diffid})
desc = getdescfromdrev(drev)
header = b'# HG changeset patch\n'
@@ -1001,7 +1057,7 @@
if actions:
params = {b'objectIdentifier': drev[b'phid'],
b'transactions': actions}
- callconduit(repo, b'differential.revision.edit', params)
+ callconduit(ui, b'differential.revision.edit', params)
templatekeyword = registrar.templatekeyword()
--- a/hgext/rebase.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/rebase.py Mon Jul 22 14:00:33 2019 -0400
@@ -108,7 +108,9 @@
@revsetpredicate('_destautoorphanrebase')
def _revsetdestautoorphanrebase(repo, subset, x):
- """automatic rebase destination for a single orphan revision"""
+ # ``_destautoorphanrebase()``
+
+ # automatic rebase destination for a single orphan revision.
unfi = repo.unfiltered()
obsoleted = unfi.revs('obsolete()')
@@ -848,8 +850,9 @@
singletransaction = True
By default, rebase writes to the working copy, but you can configure it to
- run in-memory for for better performance, and to allow it to run if the
- working copy is dirty::
+ run in-memory for better performance. When the rebase is not moving the
+ parent(s) of the working copy (AKA the "currently checked out changesets"),
+ this may also allow it to run even if the working copy is dirty::
[rebase]
experimental.inmemory = True
@@ -1819,7 +1822,7 @@
ui.debug('--update and --rebase are not compatible, ignoring '
'the update flag\n')
- cmdutil.checkunfinished(repo)
+ cmdutil.checkunfinished(repo, skipmerge=True)
cmdutil.bailifchanged(repo, hint=_('cannot pull with rebase: '
'please commit or shelve your changes first'))
@@ -1920,6 +1923,22 @@
obsoleteextinctsuccessors,
)
+def abortrebase(ui, repo):
+ with repo.wlock(), repo.lock():
+ rbsrt = rebaseruntime(repo, ui)
+ rbsrt._prepareabortorcontinue(isabort=True)
+
+def continuerebase(ui, repo):
+ with repo.wlock(), repo.lock():
+ rbsrt = rebaseruntime(repo, ui)
+ ms = mergemod.mergestate.read(repo)
+ mergeutil.checkunresolved(ms)
+ retcode = rbsrt._prepareabortorcontinue(isabort=False)
+ if retcode is not None:
+ return retcode
+ rbsrt._performrebase(None)
+ rbsrt._finishrebase()
+
def summaryhook(ui, repo):
if not repo.vfs.exists('rebasestate'):
return
@@ -1947,8 +1966,6 @@
entry[1].append(('t', 'tool', '',
_("specify merge tool for rebase")))
cmdutil.summaryhooks.add('rebase', summaryhook)
- cmdutil.unfinishedstates.append(
- ['rebasestate', False, False, _('rebase in progress'),
- _("use 'hg rebase --continue' or 'hg rebase --abort'")])
- cmdutil.afterresolvedstates.append(
- ['rebasestate', _('hg rebase --continue')])
+ statemod.addunfinished('rebase', fname='rebasestate', stopflag=True,
+ continueflag=True, abortfunc=abortrebase,
+ continuefunc=continuerebase)
--- a/hgext/remotefilelog/__init__.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/remotefilelog/__init__.py Mon Jul 22 14:00:33 2019 -0400
@@ -293,6 +293,35 @@
# debugdata needs remotefilelog.len to work
extensions.wrapcommand(commands.table, 'debugdata', debugdatashallow)
+ changegroup.cgpacker = shallowbundle.shallowcg1packer
+
+ extensions.wrapfunction(changegroup, '_addchangegroupfiles',
+ shallowbundle.addchangegroupfiles)
+ extensions.wrapfunction(
+ changegroup, 'makechangegroup', shallowbundle.makechangegroup)
+ extensions.wrapfunction(localrepo, 'makestore', storewrapper)
+ extensions.wrapfunction(exchange, 'pull', exchangepull)
+ extensions.wrapfunction(merge, 'applyupdates', applyupdates)
+ extensions.wrapfunction(merge, '_checkunknownfiles', checkunknownfiles)
+ extensions.wrapfunction(context.workingctx, '_checklookup', checklookup)
+ extensions.wrapfunction(scmutil, '_findrenames', findrenames)
+ extensions.wrapfunction(copies, '_computeforwardmissing',
+ computeforwardmissing)
+ extensions.wrapfunction(dispatch, 'runcommand', runcommand)
+ extensions.wrapfunction(repair, '_collectbrokencsets', _collectbrokencsets)
+ extensions.wrapfunction(context.changectx, 'filectx', filectx)
+ extensions.wrapfunction(context.workingctx, 'filectx', workingfilectx)
+ extensions.wrapfunction(patch, 'trydiff', trydiff)
+ extensions.wrapfunction(hg, 'verify', _verify)
+ scmutil.fileprefetchhooks.add('remotefilelog', _fileprefetchhook)
+
+ # disappointing hacks below
+ extensions.wrapfunction(scmutil, 'getrenamedfn', getrenamedfn)
+ extensions.wrapfunction(revset, 'filelog', filelogrevset)
+ revset.symbols['filelog'] = revset.filelog
+ extensions.wrapfunction(cmdutil, 'walkfilerevs', walkfilerevs)
+
+
def cloneshallow(orig, ui, repo, *args, **opts):
if opts.get(r'shallow'):
repos = []
@@ -405,6 +434,158 @@
shallowrepo.wraprepo(repo)
repo.store = shallowstore.wrapstore(repo.store)
+def storewrapper(orig, requirements, path, vfstype):
+ s = orig(requirements, path, vfstype)
+ if constants.SHALLOWREPO_REQUIREMENT in requirements:
+ s = shallowstore.wrapstore(s)
+
+ return s
+
+# prefetch files before update
+def applyupdates(orig, repo, actions, wctx, mctx, overwrite, wantfiledata,
+ labels=None):
+ if isenabled(repo):
+ manifest = mctx.manifest()
+ files = []
+ for f, args, msg in actions['g']:
+ files.append((f, hex(manifest[f])))
+ # batch fetch the needed files from the server
+ repo.fileservice.prefetch(files)
+ return orig(repo, actions, wctx, mctx, overwrite, wantfiledata,
+ labels=labels)
+
+# Prefetch merge checkunknownfiles
+def checkunknownfiles(orig, repo, wctx, mctx, force, actions,
+ *args, **kwargs):
+ if isenabled(repo):
+ files = []
+ sparsematch = repo.maybesparsematch(mctx.rev())
+ for f, (m, actionargs, msg) in actions.iteritems():
+ if sparsematch and not sparsematch(f):
+ continue
+ if m in ('c', 'dc', 'cm'):
+ files.append((f, hex(mctx.filenode(f))))
+ elif m == 'dg':
+ f2 = actionargs[0]
+ files.append((f2, hex(mctx.filenode(f2))))
+ # batch fetch the needed files from the server
+ repo.fileservice.prefetch(files)
+ return orig(repo, wctx, mctx, force, actions, *args, **kwargs)
+
+# Prefetch files before status attempts to look at their size and contents
+def checklookup(orig, self, files):
+ repo = self._repo
+ if isenabled(repo):
+ prefetchfiles = []
+ for parent in self._parents:
+ for f in files:
+ if f in parent:
+ prefetchfiles.append((f, hex(parent.filenode(f))))
+ # batch fetch the needed files from the server
+ repo.fileservice.prefetch(prefetchfiles)
+ return orig(self, files)
+
+# Prefetch the logic that compares added and removed files for renames
+def findrenames(orig, repo, matcher, added, removed, *args, **kwargs):
+ if isenabled(repo):
+ files = []
+ pmf = repo['.'].manifest()
+ for f in removed:
+ if f in pmf:
+ files.append((f, hex(pmf[f])))
+ # batch fetch the needed files from the server
+ repo.fileservice.prefetch(files)
+ return orig(repo, matcher, added, removed, *args, **kwargs)
+
+# prefetch files before pathcopies check
+def computeforwardmissing(orig, a, b, match=None):
+ missing = orig(a, b, match=match)
+ repo = a._repo
+ if isenabled(repo):
+ mb = b.manifest()
+
+ files = []
+ sparsematch = repo.maybesparsematch(b.rev())
+ if sparsematch:
+ sparsemissing = set()
+ for f in missing:
+ if sparsematch(f):
+ files.append((f, hex(mb[f])))
+ sparsemissing.add(f)
+ missing = sparsemissing
+
+ # batch fetch the needed files from the server
+ repo.fileservice.prefetch(files)
+ return missing
+
+# close cache miss server connection after the command has finished
+def runcommand(orig, lui, repo, *args, **kwargs):
+ fileservice = None
+ # repo can be None when running in chg:
+ # - at startup, reposetup was called because serve is not norepo
+ # - a norepo command like "help" is called
+ if repo and isenabled(repo):
+ fileservice = repo.fileservice
+ try:
+ return orig(lui, repo, *args, **kwargs)
+ finally:
+ if fileservice:
+ fileservice.close()
+
+# prevent strip from stripping remotefilelogs
+def _collectbrokencsets(orig, repo, files, striprev):
+ if isenabled(repo):
+ files = list([f for f in files if not repo.shallowmatch(f)])
+ return orig(repo, files, striprev)
+
+# changectx wrappers
+def filectx(orig, self, path, fileid=None, filelog=None):
+ if fileid is None:
+ fileid = self.filenode(path)
+ if (isenabled(self._repo) and self._repo.shallowmatch(path)):
+ return remotefilectx.remotefilectx(self._repo, path, fileid=fileid,
+ changectx=self, filelog=filelog)
+ return orig(self, path, fileid=fileid, filelog=filelog)
+
+def workingfilectx(orig, self, path, filelog=None):
+ if (isenabled(self._repo) and self._repo.shallowmatch(path)):
+ return remotefilectx.remoteworkingfilectx(self._repo, path,
+ workingctx=self,
+ filelog=filelog)
+ return orig(self, path, filelog=filelog)
+
+# prefetch required revisions before a diff
+def trydiff(orig, repo, revs, ctx1, ctx2, modified, added, removed,
+ copy, getfilectx, *args, **kwargs):
+ if isenabled(repo):
+ prefetch = []
+ mf1 = ctx1.manifest()
+ for fname in modified + added + removed:
+ if fname in mf1:
+ fnode = getfilectx(fname, ctx1).filenode()
+ # fnode can be None if it's a edited working ctx file
+ if fnode:
+ prefetch.append((fname, hex(fnode)))
+ if fname not in removed:
+ fnode = getfilectx(fname, ctx2).filenode()
+ if fnode:
+ prefetch.append((fname, hex(fnode)))
+
+ repo.fileservice.prefetch(prefetch)
+
+ return orig(repo, revs, ctx1, ctx2, modified, added, removed, copy,
+ getfilectx, *args, **kwargs)
+
+# Prevent verify from processing files
+# a stub for mercurial.hg.verify()
+def _verify(orig, repo, level=None):
+ lock = repo.lock()
+ try:
+ return shallowverifier.shallowverifier(repo).verify()
+ finally:
+ lock.release()
+
+
clientonetime = False
def onetimeclientsetup(ui):
global clientonetime
@@ -412,163 +593,6 @@
return
clientonetime = True
- changegroup.cgpacker = shallowbundle.shallowcg1packer
-
- extensions.wrapfunction(changegroup, '_addchangegroupfiles',
- shallowbundle.addchangegroupfiles)
- extensions.wrapfunction(
- changegroup, 'makechangegroup', shallowbundle.makechangegroup)
-
- def storewrapper(orig, requirements, path, vfstype):
- s = orig(requirements, path, vfstype)
- if constants.SHALLOWREPO_REQUIREMENT in requirements:
- s = shallowstore.wrapstore(s)
-
- return s
- extensions.wrapfunction(localrepo, 'makestore', storewrapper)
-
- extensions.wrapfunction(exchange, 'pull', exchangepull)
-
- # prefetch files before update
- def applyupdates(orig, repo, actions, wctx, mctx, overwrite, labels=None):
- if isenabled(repo):
- manifest = mctx.manifest()
- files = []
- for f, args, msg in actions['g']:
- files.append((f, hex(manifest[f])))
- # batch fetch the needed files from the server
- repo.fileservice.prefetch(files)
- return orig(repo, actions, wctx, mctx, overwrite, labels=labels)
- extensions.wrapfunction(merge, 'applyupdates', applyupdates)
-
- # Prefetch merge checkunknownfiles
- def checkunknownfiles(orig, repo, wctx, mctx, force, actions,
- *args, **kwargs):
- if isenabled(repo):
- files = []
- sparsematch = repo.maybesparsematch(mctx.rev())
- for f, (m, actionargs, msg) in actions.iteritems():
- if sparsematch and not sparsematch(f):
- continue
- if m in ('c', 'dc', 'cm'):
- files.append((f, hex(mctx.filenode(f))))
- elif m == 'dg':
- f2 = actionargs[0]
- files.append((f2, hex(mctx.filenode(f2))))
- # batch fetch the needed files from the server
- repo.fileservice.prefetch(files)
- return orig(repo, wctx, mctx, force, actions, *args, **kwargs)
- extensions.wrapfunction(merge, '_checkunknownfiles', checkunknownfiles)
-
- # Prefetch files before status attempts to look at their size and contents
- def checklookup(orig, self, files):
- repo = self._repo
- if isenabled(repo):
- prefetchfiles = []
- for parent in self._parents:
- for f in files:
- if f in parent:
- prefetchfiles.append((f, hex(parent.filenode(f))))
- # batch fetch the needed files from the server
- repo.fileservice.prefetch(prefetchfiles)
- return orig(self, files)
- extensions.wrapfunction(context.workingctx, '_checklookup', checklookup)
-
- # Prefetch the logic that compares added and removed files for renames
- def findrenames(orig, repo, matcher, added, removed, *args, **kwargs):
- if isenabled(repo):
- files = []
- pmf = repo['.'].manifest()
- for f in removed:
- if f in pmf:
- files.append((f, hex(pmf[f])))
- # batch fetch the needed files from the server
- repo.fileservice.prefetch(files)
- return orig(repo, matcher, added, removed, *args, **kwargs)
- extensions.wrapfunction(scmutil, '_findrenames', findrenames)
-
- # prefetch files before mergecopies check
- def computenonoverlap(orig, repo, c1, c2, *args, **kwargs):
- u1, u2 = orig(repo, c1, c2, *args, **kwargs)
- if isenabled(repo):
- m1 = c1.manifest()
- m2 = c2.manifest()
- files = []
-
- sparsematch1 = repo.maybesparsematch(c1.rev())
- if sparsematch1:
- sparseu1 = set()
- for f in u1:
- if sparsematch1(f):
- files.append((f, hex(m1[f])))
- sparseu1.add(f)
- u1 = sparseu1
-
- sparsematch2 = repo.maybesparsematch(c2.rev())
- if sparsematch2:
- sparseu2 = set()
- for f in u2:
- if sparsematch2(f):
- files.append((f, hex(m2[f])))
- sparseu2.add(f)
- u2 = sparseu2
-
- # batch fetch the needed files from the server
- repo.fileservice.prefetch(files)
- return u1, u2
- extensions.wrapfunction(copies, '_computenonoverlap', computenonoverlap)
-
- # prefetch files before pathcopies check
- def computeforwardmissing(orig, a, b, match=None):
- missing = orig(a, b, match=match)
- repo = a._repo
- if isenabled(repo):
- mb = b.manifest()
-
- files = []
- sparsematch = repo.maybesparsematch(b.rev())
- if sparsematch:
- sparsemissing = set()
- for f in missing:
- if sparsematch(f):
- files.append((f, hex(mb[f])))
- sparsemissing.add(f)
- missing = sparsemissing
-
- # batch fetch the needed files from the server
- repo.fileservice.prefetch(files)
- return missing
- extensions.wrapfunction(copies, '_computeforwardmissing',
- computeforwardmissing)
-
- # close cache miss server connection after the command has finished
- def runcommand(orig, lui, repo, *args, **kwargs):
- fileservice = None
- # repo can be None when running in chg:
- # - at startup, reposetup was called because serve is not norepo
- # - a norepo command like "help" is called
- if repo and isenabled(repo):
- fileservice = repo.fileservice
- try:
- return orig(lui, repo, *args, **kwargs)
- finally:
- if fileservice:
- fileservice.close()
- extensions.wrapfunction(dispatch, 'runcommand', runcommand)
-
- # disappointing hacks below
- scmutil.getrenamedfn = getrenamedfn
- extensions.wrapfunction(revset, 'filelog', filelogrevset)
- revset.symbols['filelog'] = revset.filelog
- extensions.wrapfunction(cmdutil, 'walkfilerevs', walkfilerevs)
-
- # prevent strip from stripping remotefilelogs
- def _collectbrokencsets(orig, repo, files, striprev):
- if isenabled(repo):
- files = list([f for f in files if not repo.shallowmatch(f)])
- return orig(repo, files, striprev)
- extensions.wrapfunction(repair, '_collectbrokencsets', _collectbrokencsets)
-
# Don't commit filelogs until we know the commit hash, since the hash
# is present in the filelog blob.
# This violates Mercurial's filelog->manifest->changelog write order,
@@ -611,60 +635,10 @@
return node
extensions.wrapfunction(changelog.changelog, 'add', changelogadd)
- # changectx wrappers
- def filectx(orig, self, path, fileid=None, filelog=None):
- if fileid is None:
- fileid = self.filenode(path)
- if (isenabled(self._repo) and self._repo.shallowmatch(path)):
- return remotefilectx.remotefilectx(self._repo, path,
- fileid=fileid, changectx=self, filelog=filelog)
- return orig(self, path, fileid=fileid, filelog=filelog)
- extensions.wrapfunction(context.changectx, 'filectx', filectx)
-
- def workingfilectx(orig, self, path, filelog=None):
- if (isenabled(self._repo) and self._repo.shallowmatch(path)):
- return remotefilectx.remoteworkingfilectx(self._repo,
- path, workingctx=self, filelog=filelog)
- return orig(self, path, filelog=filelog)
- extensions.wrapfunction(context.workingctx, 'filectx', workingfilectx)
+def getrenamedfn(orig, repo, endrev=None):
+ if not isenabled(repo) or copies.usechangesetcentricalgo(repo):
+ return orig(repo, endrev)
- # prefetch required revisions before a diff
- def trydiff(orig, repo, revs, ctx1, ctx2, modified, added, removed,
- copy, getfilectx, *args, **kwargs):
- if isenabled(repo):
- prefetch = []
- mf1 = ctx1.manifest()
- for fname in modified + added + removed:
- if fname in mf1:
- fnode = getfilectx(fname, ctx1).filenode()
- # fnode can be None if it's a edited working ctx file
- if fnode:
- prefetch.append((fname, hex(fnode)))
- if fname not in removed:
- fnode = getfilectx(fname, ctx2).filenode()
- if fnode:
- prefetch.append((fname, hex(fnode)))
-
- repo.fileservice.prefetch(prefetch)
-
- return orig(repo, revs, ctx1, ctx2, modified, added, removed,
- copy, getfilectx, *args, **kwargs)
- extensions.wrapfunction(patch, 'trydiff', trydiff)
-
- # Prevent verify from processing files
- # a stub for mercurial.hg.verify()
- def _verify(orig, repo):
- lock = repo.lock()
- try:
- return shallowverifier.shallowverifier(repo).verify()
- finally:
- lock.release()
-
- extensions.wrapfunction(hg, 'verify', _verify)
-
- scmutil.fileprefetchhooks.add('remotefilelog', _fileprefetchhook)
-
-def getrenamedfn(repo, endrev=None):
rcache = {}
def getrenamed(fn, rev):
@@ -1019,9 +993,6 @@
mf = ctx.manifest()
sparsematch = repo.maybesparsematch(ctx.rev())
for path in ctx.walk(match):
- if path.endswith('/'):
- # Tree manifest that's being excluded as part of narrow
- continue
if (not sparsematch or sparsematch(path)) and path in mf:
allfiles.append((path, hex(mf[path])))
repo.fileservice.prefetch(allfiles)
--- a/hgext/remotefilelog/fileserverclient.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/remotefilelog/fileserverclient.py Mon Jul 22 14:00:33 2019 -0400
@@ -396,6 +396,9 @@
batchdefault = 10
batchsize = self.ui.configint(
'remotefilelog', 'batchsize', batchdefault)
+ self.ui.debug(
+ b'requesting %d files from '
+ b'remotefilelog server...\n' % len(missed))
_getfilesbatch(
remote, self.receivemissing, progress.increment,
missed, idmap, batchsize)
--- a/hgext/remotefilelog/repack.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/remotefilelog/repack.py Mon Jul 22 14:00:33 2019 -0400
@@ -43,7 +43,8 @@
if packsonly:
cmd.append('--packsonly')
repo.ui.warn(msg)
- procutil.runbgcommand(cmd, encoding.environ)
+ # We know this command will find a binary, so don't block on it starting.
+ procutil.runbgcommand(cmd, encoding.environ, ensurestart=False)
def fullrepack(repo, options=None):
"""If ``packsonly`` is True, stores creating only loose objects are skipped.
--- a/hgext/remotefilelog/shallowrepo.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/remotefilelog/shallowrepo.py Mon Jul 22 14:00:33 2019 -0400
@@ -33,13 +33,6 @@
shallowutil,
)
-if util.safehasattr(util, '_hgexecutable'):
- # Before 5be286db
- _hgexecutable = util.hgexecutable
-else:
- from mercurial.utils import procutil
- _hgexecutable = procutil.hgexecutable
-
# These make*stores functions are global so that other extensions can replace
# them.
def makelocalstores(repo):
@@ -168,7 +161,7 @@
**kwargs)
@localrepo.unfilteredmethod
- def commitctx(self, ctx, error=False):
+ def commitctx(self, ctx, error=False, origctx=None):
"""Add a new revision to current repository.
Revision information is passed via the context argument.
"""
@@ -186,18 +179,21 @@
files.append((f, hex(fparent1)))
self.fileservice.prefetch(files)
return super(shallowrepository, self).commitctx(ctx,
- error=error)
+ error=error,
+ origctx=origctx)
def backgroundprefetch(self, revs, base=None, repack=False, pats=None,
opts=None):
"""Runs prefetch in background with optional repack
"""
- cmd = [_hgexecutable(), '-R', repo.origroot, 'prefetch']
+ cmd = [procutil.hgexecutable(), '-R', repo.origroot, 'prefetch']
if repack:
cmd.append('--repack')
if revs:
cmd += ['-r', revs]
- procutil.runbgcommand(cmd, encoding.environ)
+ # We know this command will find a binary, so don't block
+ # on it starting.
+ procutil.runbgcommand(cmd, encoding.environ, ensurestart=False)
def prefetch(self, revs, base=None, pats=None, opts=None):
"""Prefetches all the necessary file revisions for the given revs
--- a/hgext/remotenames.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/remotenames.py Mon Jul 22 14:00:33 2019 -0400
@@ -167,6 +167,8 @@
for k, vtup in self.potentialentries.iteritems():
yield (k, [bin(vtup[0])])
+ items = iteritems
+
class remotenames(object):
"""
This class encapsulates all the remotenames state. It also contains
--- a/hgext/share.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/share.py Mon Jul 22 14:00:33 2019 -0400
@@ -125,6 +125,10 @@
def _hassharedbookmarks(repo):
"""Returns whether this repo has shared bookmarks"""
+ if bookmarks.bookmarksinstore(repo):
+ # Kind of a lie, but it means that we skip our custom reads and writes
+ # from/to the source repo.
+ return False
try:
shared = repo.vfs.read('shared').splitlines()
except IOError as inst:
--- a/hgext/shelve.py Tue Jul 09 10:07:35 2019 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1147 +0,0 @@
-# shelve.py - save/restore working directory state
-#
-# Copyright 2013 Facebook, Inc.
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-"""save and restore changes to the working directory
-
-The "hg shelve" command saves changes made to the working directory
-and reverts those changes, resetting the working directory to a clean
-state.
-
-Later on, the "hg unshelve" command restores the changes saved by "hg
-shelve". Changes can be restored even after updating to a different
-parent, in which case Mercurial's merge machinery will resolve any
-conflicts if necessary.
-
-You can have more than one shelved change outstanding at a time; each
-shelved change has a distinct name. For details, see the help for "hg
-shelve".
-"""
-from __future__ import absolute_import
-
-import collections
-import errno
-import itertools
-import stat
-
-from mercurial.i18n import _
-from mercurial import (
- bookmarks,
- bundle2,
- bundlerepo,
- changegroup,
- cmdutil,
- discovery,
- error,
- exchange,
- hg,
- lock as lockmod,
- mdiff,
- merge,
- node as nodemod,
- patch,
- phases,
- pycompat,
- registrar,
- repair,
- scmutil,
- templatefilters,
- util,
- vfs as vfsmod,
-)
-
-from . import (
- rebase,
-)
-from mercurial.utils import (
- dateutil,
- stringutil,
-)
-
-cmdtable = {}
-command = registrar.command(cmdtable)
-# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
-# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
-# be specifying the version(s) of Mercurial they are tested with, or
-# leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
-
-configtable = {}
-configitem = registrar.configitem(configtable)
-
-configitem('shelve', 'maxbackups',
- default=10,
-)
-
-backupdir = 'shelve-backup'
-shelvedir = 'shelved'
-shelvefileextensions = ['hg', 'patch', 'shelve']
-# universal extension is present in all types of shelves
-patchextension = 'patch'
-
-# we never need the user, so we use a
-# generic user for all shelve operations
-shelveuser = 'shelve@localhost'
-
-class shelvedfile(object):
- """Helper for the file storing a single shelve
-
- Handles common functions on shelve files (.hg/.patch) using
- the vfs layer"""
- def __init__(self, repo, name, filetype=None):
- self.repo = repo
- self.name = name
- self.vfs = vfsmod.vfs(repo.vfs.join(shelvedir))
- self.backupvfs = vfsmod.vfs(repo.vfs.join(backupdir))
- self.ui = self.repo.ui
- if filetype:
- self.fname = name + '.' + filetype
- else:
- self.fname = name
-
- def exists(self):
- return self.vfs.exists(self.fname)
-
- def filename(self):
- return self.vfs.join(self.fname)
-
- def backupfilename(self):
- def gennames(base):
- yield base
- base, ext = base.rsplit('.', 1)
- for i in itertools.count(1):
- yield '%s-%d.%s' % (base, i, ext)
-
- name = self.backupvfs.join(self.fname)
- for n in gennames(name):
- if not self.backupvfs.exists(n):
- return n
-
- def movetobackup(self):
- if not self.backupvfs.isdir():
- self.backupvfs.makedir()
- util.rename(self.filename(), self.backupfilename())
-
- def stat(self):
- return self.vfs.stat(self.fname)
-
- def opener(self, mode='rb'):
- try:
- return self.vfs(self.fname, mode)
- except IOError as err:
- if err.errno != errno.ENOENT:
- raise
- raise error.Abort(_("shelved change '%s' not found") % self.name)
-
- def applybundle(self, tr):
- fp = self.opener()
- try:
- targetphase = phases.internal
- if not phases.supportinternal(self.repo):
- targetphase = phases.secret
- gen = exchange.readbundle(self.repo.ui, fp, self.fname, self.vfs)
- pretip = self.repo['tip']
- bundle2.applybundle(self.repo, gen, tr,
- source='unshelve',
- url='bundle:' + self.vfs.join(self.fname),
- targetphase=targetphase)
- shelvectx = self.repo['tip']
- if pretip == shelvectx:
- shelverev = tr.changes['revduplicates'][-1]
- shelvectx = self.repo[shelverev]
- return shelvectx
- finally:
- fp.close()
-
- def bundlerepo(self):
- path = self.vfs.join(self.fname)
- return bundlerepo.instance(self.repo.baseui,
- 'bundle://%s+%s' % (self.repo.root, path))
-
- def writebundle(self, bases, node):
- cgversion = changegroup.safeversion(self.repo)
- if cgversion == '01':
- btype = 'HG10BZ'
- compression = None
- else:
- btype = 'HG20'
- compression = 'BZ'
-
- repo = self.repo.unfiltered()
-
- outgoing = discovery.outgoing(repo, missingroots=bases,
- missingheads=[node])
- cg = changegroup.makechangegroup(repo, outgoing, cgversion, 'shelve')
-
- bundle2.writebundle(self.ui, cg, self.fname, btype, self.vfs,
- compression=compression)
-
- def writeinfo(self, info):
- scmutil.simplekeyvaluefile(self.vfs, self.fname).write(info)
-
- def readinfo(self):
- return scmutil.simplekeyvaluefile(self.vfs, self.fname).read()
-
-class shelvedstate(object):
- """Handle persistence during unshelving operations.
-
- Handles saving and restoring a shelved state. Ensures that different
- versions of a shelved state are possible and handles them appropriately.
- """
- _version = 2
- _filename = 'shelvedstate'
- _keep = 'keep'
- _nokeep = 'nokeep'
- # colon is essential to differentiate from a real bookmark name
- _noactivebook = ':no-active-bookmark'
-
- @classmethod
- def _verifyandtransform(cls, d):
- """Some basic shelvestate syntactic verification and transformation"""
- try:
- d['originalwctx'] = nodemod.bin(d['originalwctx'])
- d['pendingctx'] = nodemod.bin(d['pendingctx'])
- d['parents'] = [nodemod.bin(h)
- for h in d['parents'].split(' ')]
- d['nodestoremove'] = [nodemod.bin(h)
- for h in d['nodestoremove'].split(' ')]
- except (ValueError, TypeError, KeyError) as err:
- raise error.CorruptedState(pycompat.bytestr(err))
-
- @classmethod
- def _getversion(cls, repo):
- """Read version information from shelvestate file"""
- fp = repo.vfs(cls._filename)
- try:
- version = int(fp.readline().strip())
- except ValueError as err:
- raise error.CorruptedState(pycompat.bytestr(err))
- finally:
- fp.close()
- return version
-
- @classmethod
- def _readold(cls, repo):
- """Read the old position-based version of a shelvestate file"""
- # Order is important, because old shelvestate file uses it
- # to detemine values of fields (i.g. name is on the second line,
- # originalwctx is on the third and so forth). Please do not change.
- keys = ['version', 'name', 'originalwctx', 'pendingctx', 'parents',
- 'nodestoremove', 'branchtorestore', 'keep', 'activebook']
- # this is executed only seldomly, so it is not a big deal
- # that we open this file twice
- fp = repo.vfs(cls._filename)
- d = {}
- try:
- for key in keys:
- d[key] = fp.readline().strip()
- finally:
- fp.close()
- return d
-
- @classmethod
- def load(cls, repo):
- version = cls._getversion(repo)
- if version < cls._version:
- d = cls._readold(repo)
- elif version == cls._version:
- d = scmutil.simplekeyvaluefile(
- repo.vfs, cls._filename).read(firstlinenonkeyval=True)
- else:
- raise error.Abort(_('this version of shelve is incompatible '
- 'with the version used in this repo'))
-
- cls._verifyandtransform(d)
- try:
- obj = cls()
- obj.name = d['name']
- obj.wctx = repo[d['originalwctx']]
- obj.pendingctx = repo[d['pendingctx']]
- obj.parents = d['parents']
- obj.nodestoremove = d['nodestoremove']
- obj.branchtorestore = d.get('branchtorestore', '')
- obj.keep = d.get('keep') == cls._keep
- obj.activebookmark = ''
- if d.get('activebook', '') != cls._noactivebook:
- obj.activebookmark = d.get('activebook', '')
- except (error.RepoLookupError, KeyError) as err:
- raise error.CorruptedState(pycompat.bytestr(err))
-
- return obj
-
- @classmethod
- def save(cls, repo, name, originalwctx, pendingctx, nodestoremove,
- branchtorestore, keep=False, activebook=''):
- info = {
- "name": name,
- "originalwctx": nodemod.hex(originalwctx.node()),
- "pendingctx": nodemod.hex(pendingctx.node()),
- "parents": ' '.join([nodemod.hex(p)
- for p in repo.dirstate.parents()]),
- "nodestoremove": ' '.join([nodemod.hex(n)
- for n in nodestoremove]),
- "branchtorestore": branchtorestore,
- "keep": cls._keep if keep else cls._nokeep,
- "activebook": activebook or cls._noactivebook
- }
- scmutil.simplekeyvaluefile(
- repo.vfs, cls._filename).write(info,
- firstline=("%d" % cls._version))
-
- @classmethod
- def clear(cls, repo):
- repo.vfs.unlinkpath(cls._filename, ignoremissing=True)
-
-def cleanupoldbackups(repo):
- vfs = vfsmod.vfs(repo.vfs.join(backupdir))
- maxbackups = repo.ui.configint('shelve', 'maxbackups')
- hgfiles = [f for f in vfs.listdir()
- if f.endswith('.' + patchextension)]
- hgfiles = sorted([(vfs.stat(f)[stat.ST_MTIME], f) for f in hgfiles])
- if maxbackups > 0 and maxbackups < len(hgfiles):
- bordermtime = hgfiles[-maxbackups][0]
- else:
- bordermtime = None
- for mtime, f in hgfiles[:len(hgfiles) - maxbackups]:
- if mtime == bordermtime:
- # keep it, because timestamp can't decide exact order of backups
- continue
- base = f[:-(1 + len(patchextension))]
- for ext in shelvefileextensions:
- vfs.tryunlink(base + '.' + ext)
-
-def _backupactivebookmark(repo):
- activebookmark = repo._activebookmark
- if activebookmark:
- bookmarks.deactivate(repo)
- return activebookmark
-
-def _restoreactivebookmark(repo, mark):
- if mark:
- bookmarks.activate(repo, mark)
-
-def _aborttransaction(repo, tr):
- '''Abort current transaction for shelve/unshelve, but keep dirstate
- '''
- dirstatebackupname = 'dirstate.shelve'
- repo.dirstate.savebackup(tr, dirstatebackupname)
- tr.abort()
- repo.dirstate.restorebackup(None, dirstatebackupname)
-
-def getshelvename(repo, parent, opts):
- """Decide on the name this shelve is going to have"""
- def gennames():
- yield label
- for i in itertools.count(1):
- yield '%s-%02d' % (label, i)
- name = opts.get('name')
- label = repo._activebookmark or parent.branch() or 'default'
- # slashes aren't allowed in filenames, therefore we rename it
- label = label.replace('/', '_')
- label = label.replace('\\', '_')
- # filenames must not start with '.' as it should not be hidden
- if label.startswith('.'):
- label = label.replace('.', '_', 1)
-
- if name:
- if shelvedfile(repo, name, patchextension).exists():
- e = _("a shelved change named '%s' already exists") % name
- raise error.Abort(e)
-
- # ensure we are not creating a subdirectory or a hidden file
- if '/' in name or '\\' in name:
- raise error.Abort(_('shelved change names can not contain slashes'))
- if name.startswith('.'):
- raise error.Abort(_("shelved change names can not start with '.'"))
-
- else:
- for n in gennames():
- if not shelvedfile(repo, n, patchextension).exists():
- name = n
- break
-
- return name
-
-def mutableancestors(ctx):
- """return all mutable ancestors for ctx (included)
-
- Much faster than the revset ancestors(ctx) & draft()"""
- seen = {nodemod.nullrev}
- visit = collections.deque()
- visit.append(ctx)
- while visit:
- ctx = visit.popleft()
- yield ctx.node()
- for parent in ctx.parents():
- rev = parent.rev()
- if rev not in seen:
- seen.add(rev)
- if parent.mutable():
- visit.append(parent)
-
-def getcommitfunc(extra, interactive, editor=False):
- def commitfunc(ui, repo, message, match, opts):
- hasmq = util.safehasattr(repo, 'mq')
- if hasmq:
- saved, repo.mq.checkapplied = repo.mq.checkapplied, False
-
- targetphase = phases.internal
- if not phases.supportinternal(repo):
- targetphase = phases.secret
- overrides = {('phases', 'new-commit'): targetphase}
- try:
- editor_ = False
- if editor:
- editor_ = cmdutil.getcommiteditor(editform='shelve.shelve',
- **pycompat.strkwargs(opts))
- with repo.ui.configoverride(overrides):
- return repo.commit(message, shelveuser, opts.get('date'),
- match, editor=editor_, extra=extra)
- finally:
- if hasmq:
- repo.mq.checkapplied = saved
-
- def interactivecommitfunc(ui, repo, *pats, **opts):
- opts = pycompat.byteskwargs(opts)
- match = scmutil.match(repo['.'], pats, {})
- message = opts['message']
- return commitfunc(ui, repo, message, match, opts)
-
- return interactivecommitfunc if interactive else commitfunc
-
-def _nothingtoshelvemessaging(ui, repo, pats, opts):
- stat = repo.status(match=scmutil.match(repo[None], pats, opts))
- if stat.deleted:
- ui.status(_("nothing changed (%d missing files, see "
- "'hg status')\n") % len(stat.deleted))
- else:
- ui.status(_("nothing changed\n"))
-
-def _shelvecreatedcommit(repo, node, name, match):
- info = {'node': nodemod.hex(node)}
- shelvedfile(repo, name, 'shelve').writeinfo(info)
- bases = list(mutableancestors(repo[node]))
- shelvedfile(repo, name, 'hg').writebundle(bases, node)
- with shelvedfile(repo, name, patchextension).opener('wb') as fp:
- cmdutil.exportfile(repo, [node], fp, opts=mdiff.diffopts(git=True),
- match=match)
-
-def _includeunknownfiles(repo, pats, opts, extra):
- s = repo.status(match=scmutil.match(repo[None], pats, opts),
- unknown=True)
- if s.unknown:
- extra['shelve_unknown'] = '\0'.join(s.unknown)
- repo[None].add(s.unknown)
-
-def _finishshelve(repo, tr):
- if phases.supportinternal(repo):
- tr.close()
- else:
- _aborttransaction(repo, tr)
-
-def createcmd(ui, repo, pats, opts):
- """subcommand that creates a new shelve"""
- with repo.wlock():
- cmdutil.checkunfinished(repo)
- return _docreatecmd(ui, repo, pats, opts)
-
-def _docreatecmd(ui, repo, pats, opts):
- wctx = repo[None]
- parents = wctx.parents()
- if len(parents) > 1:
- raise error.Abort(_('cannot shelve while merging'))
- parent = parents[0]
- origbranch = wctx.branch()
-
- if parent.node() != nodemod.nullid:
- desc = "changes to: %s" % parent.description().split('\n', 1)[0]
- else:
- desc = '(changes in empty repository)'
-
- if not opts.get('message'):
- opts['message'] = desc
-
- lock = tr = activebookmark = None
- try:
- lock = repo.lock()
-
- # use an uncommitted transaction to generate the bundle to avoid
- # pull races. ensure we don't print the abort message to stderr.
- tr = repo.transaction('shelve', report=lambda x: None)
-
- interactive = opts.get('interactive', False)
- includeunknown = (opts.get('unknown', False) and
- not opts.get('addremove', False))
-
- name = getshelvename(repo, parent, opts)
- activebookmark = _backupactivebookmark(repo)
- extra = {'internal': 'shelve'}
- if includeunknown:
- _includeunknownfiles(repo, pats, opts, extra)
-
- if _iswctxonnewbranch(repo) and not _isbareshelve(pats, opts):
- # In non-bare shelve we don't store newly created branch
- # at bundled commit
- repo.dirstate.setbranch(repo['.'].branch())
-
- commitfunc = getcommitfunc(extra, interactive, editor=True)
- if not interactive:
- node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
- else:
- node = cmdutil.dorecord(ui, repo, commitfunc, None,
- False, cmdutil.recordfilter, *pats,
- **pycompat.strkwargs(opts))
- if not node:
- _nothingtoshelvemessaging(ui, repo, pats, opts)
- return 1
-
- # Create a matcher so that prefetch doesn't attempt to fetch
- # the entire repository pointlessly, and as an optimisation
- # for movedirstate, if needed.
- match = scmutil.matchfiles(repo, repo[node].files())
- _shelvecreatedcommit(repo, node, name, match)
-
- if ui.formatted():
- desc = stringutil.ellipsis(desc, ui.termwidth())
- ui.status(_('shelved as %s\n') % name)
- if opts['keep']:
- with repo.dirstate.parentchange():
- scmutil.movedirstate(repo, parent, match)
- else:
- hg.update(repo, parent.node())
- if origbranch != repo['.'].branch() and not _isbareshelve(pats, opts):
- repo.dirstate.setbranch(origbranch)
-
- _finishshelve(repo, tr)
- finally:
- _restoreactivebookmark(repo, activebookmark)
- lockmod.release(tr, lock)
-
-def _isbareshelve(pats, opts):
- return (not pats
- and not opts.get('interactive', False)
- and not opts.get('include', False)
- and not opts.get('exclude', False))
-
-def _iswctxonnewbranch(repo):
- return repo[None].branch() != repo['.'].branch()
-
-def cleanupcmd(ui, repo):
- """subcommand that deletes all shelves"""
-
- with repo.wlock():
- for (name, _type) in repo.vfs.readdir(shelvedir):
- suffix = name.rsplit('.', 1)[-1]
- if suffix in shelvefileextensions:
- shelvedfile(repo, name).movetobackup()
- cleanupoldbackups(repo)
-
-def deletecmd(ui, repo, pats):
- """subcommand that deletes a specific shelve"""
- if not pats:
- raise error.Abort(_('no shelved changes specified!'))
- with repo.wlock():
- try:
- for name in pats:
- for suffix in shelvefileextensions:
- shfile = shelvedfile(repo, name, suffix)
- # patch file is necessary, as it should
- # be present for any kind of shelve,
- # but the .hg file is optional as in future we
- # will add obsolete shelve with does not create a
- # bundle
- if shfile.exists() or suffix == patchextension:
- shfile.movetobackup()
- cleanupoldbackups(repo)
- except OSError as err:
- if err.errno != errno.ENOENT:
- raise
- raise error.Abort(_("shelved change '%s' not found") % name)
-
-def listshelves(repo):
- """return all shelves in repo as list of (time, filename)"""
- try:
- names = repo.vfs.readdir(shelvedir)
- except OSError as err:
- if err.errno != errno.ENOENT:
- raise
- return []
- info = []
- for (name, _type) in names:
- pfx, sfx = name.rsplit('.', 1)
- if not pfx or sfx != patchextension:
- continue
- st = shelvedfile(repo, name).stat()
- info.append((st[stat.ST_MTIME], shelvedfile(repo, pfx).filename()))
- return sorted(info, reverse=True)
-
-def listcmd(ui, repo, pats, opts):
- """subcommand that displays the list of shelves"""
- pats = set(pats)
- width = 80
- if not ui.plain():
- width = ui.termwidth()
- namelabel = 'shelve.newest'
- ui.pager('shelve')
- for mtime, name in listshelves(repo):
- sname = util.split(name)[1]
- if pats and sname not in pats:
- continue
- ui.write(sname, label=namelabel)
- namelabel = 'shelve.name'
- if ui.quiet:
- ui.write('\n')
- continue
- ui.write(' ' * (16 - len(sname)))
- used = 16
- date = dateutil.makedate(mtime)
- age = '(%s)' % templatefilters.age(date, abbrev=True)
- ui.write(age, label='shelve.age')
- ui.write(' ' * (12 - len(age)))
- used += 12
- with open(name + '.' + patchextension, 'rb') as fp:
- while True:
- line = fp.readline()
- if not line:
- break
- if not line.startswith('#'):
- desc = line.rstrip()
- if ui.formatted():
- desc = stringutil.ellipsis(desc, width - used)
- ui.write(desc)
- break
- ui.write('\n')
- if not (opts['patch'] or opts['stat']):
- continue
- difflines = fp.readlines()
- if opts['patch']:
- for chunk, label in patch.difflabel(iter, difflines):
- ui.write(chunk, label=label)
- if opts['stat']:
- for chunk, label in patch.diffstatui(difflines, width=width):
- ui.write(chunk, label=label)
-
-def patchcmds(ui, repo, pats, opts):
- """subcommand that displays shelves"""
- if len(pats) == 0:
- shelves = listshelves(repo)
- if not shelves:
- raise error.Abort(_("there are no shelves to show"))
- mtime, name = shelves[0]
- sname = util.split(name)[1]
- pats = [sname]
-
- for shelfname in pats:
- if not shelvedfile(repo, shelfname, patchextension).exists():
- raise error.Abort(_("cannot find shelf %s") % shelfname)
-
- listcmd(ui, repo, pats, opts)
-
-def checkparents(repo, state):
- """check parent while resuming an unshelve"""
- if state.parents != repo.dirstate.parents():
- raise error.Abort(_('working directory parents do not match unshelve '
- 'state'))
-
-def unshelveabort(ui, repo, state, opts):
- """subcommand that abort an in-progress unshelve"""
- with repo.lock():
- try:
- checkparents(repo, state)
-
- merge.update(repo, state.pendingctx, branchmerge=False, force=True)
- if (state.activebookmark
- and state.activebookmark in repo._bookmarks):
- bookmarks.activate(repo, state.activebookmark)
-
- if repo.vfs.exists('unshelverebasestate'):
- repo.vfs.rename('unshelverebasestate', 'rebasestate')
- rebase.clearstatus(repo)
-
- mergefiles(ui, repo, state.wctx, state.pendingctx)
- if not phases.supportinternal(repo):
- repair.strip(ui, repo, state.nodestoremove, backup=False,
- topic='shelve')
- finally:
- shelvedstate.clear(repo)
- ui.warn(_("unshelve of '%s' aborted\n") % state.name)
-
-def mergefiles(ui, repo, wctx, shelvectx):
- """updates to wctx and merges the changes from shelvectx into the
- dirstate."""
- with ui.configoverride({('ui', 'quiet'): True}):
- hg.update(repo, wctx.node())
- ui.pushbuffer(True)
- cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents())
- ui.popbuffer()
-
-def restorebranch(ui, repo, branchtorestore):
- if branchtorestore and branchtorestore != repo.dirstate.branch():
- repo.dirstate.setbranch(branchtorestore)
- ui.status(_('marked working directory as branch %s\n')
- % branchtorestore)
-
-def unshelvecleanup(ui, repo, name, opts):
- """remove related files after an unshelve"""
- if not opts.get('keep'):
- for filetype in shelvefileextensions:
- shfile = shelvedfile(repo, name, filetype)
- if shfile.exists():
- shfile.movetobackup()
- cleanupoldbackups(repo)
-
-def unshelvecontinue(ui, repo, state, opts):
- """subcommand to continue an in-progress unshelve"""
- # We're finishing off a merge. First parent is our original
- # parent, second is the temporary "fake" commit we're unshelving.
- with repo.lock():
- checkparents(repo, state)
- ms = merge.mergestate.read(repo)
- if list(ms.unresolved()):
- raise error.Abort(
- _("unresolved conflicts, can't continue"),
- hint=_("see 'hg resolve', then 'hg unshelve --continue'"))
-
- shelvectx = repo[state.parents[1]]
- pendingctx = state.pendingctx
-
- with repo.dirstate.parentchange():
- repo.setparents(state.pendingctx.node(), nodemod.nullid)
- repo.dirstate.write(repo.currenttransaction())
-
- targetphase = phases.internal
- if not phases.supportinternal(repo):
- targetphase = phases.secret
- overrides = {('phases', 'new-commit'): targetphase}
- with repo.ui.configoverride(overrides, 'unshelve'):
- with repo.dirstate.parentchange():
- repo.setparents(state.parents[0], nodemod.nullid)
- newnode = repo.commit(text=shelvectx.description(),
- extra=shelvectx.extra(),
- user=shelvectx.user(),
- date=shelvectx.date())
-
- if newnode is None:
- # If it ended up being a no-op commit, then the normal
- # merge state clean-up path doesn't happen, so do it
- # here. Fix issue5494
- merge.mergestate.clean(repo)
- shelvectx = state.pendingctx
- msg = _('note: unshelved changes already existed '
- 'in the working copy\n')
- ui.status(msg)
- else:
- # only strip the shelvectx if we produced one
- state.nodestoremove.append(newnode)
- shelvectx = repo[newnode]
-
- hg.updaterepo(repo, pendingctx.node(), overwrite=False)
-
- if repo.vfs.exists('unshelverebasestate'):
- repo.vfs.rename('unshelverebasestate', 'rebasestate')
- rebase.clearstatus(repo)
-
- mergefiles(ui, repo, state.wctx, shelvectx)
- restorebranch(ui, repo, state.branchtorestore)
-
- if not phases.supportinternal(repo):
- repair.strip(ui, repo, state.nodestoremove, backup=False,
- topic='shelve')
- _restoreactivebookmark(repo, state.activebookmark)
- shelvedstate.clear(repo)
- unshelvecleanup(ui, repo, state.name, opts)
- ui.status(_("unshelve of '%s' complete\n") % state.name)
-
-def _commitworkingcopychanges(ui, repo, opts, tmpwctx):
- """Temporarily commit working copy changes before moving unshelve commit"""
- # Store pending changes in a commit and remember added in case a shelve
- # contains unknown files that are part of the pending change
- s = repo.status()
- addedbefore = frozenset(s.added)
- if not (s.modified or s.added or s.removed):
- return tmpwctx, addedbefore
- ui.status(_("temporarily committing pending changes "
- "(restore with 'hg unshelve --abort')\n"))
- extra = {'internal': 'shelve'}
- commitfunc = getcommitfunc(extra=extra, interactive=False,
- editor=False)
- tempopts = {}
- tempopts['message'] = "pending changes temporary commit"
- tempopts['date'] = opts.get('date')
- with ui.configoverride({('ui', 'quiet'): True}):
- node = cmdutil.commit(ui, repo, commitfunc, [], tempopts)
- tmpwctx = repo[node]
- return tmpwctx, addedbefore
-
-def _unshelverestorecommit(ui, repo, tr, basename):
- """Recreate commit in the repository during the unshelve"""
- repo = repo.unfiltered()
- node = None
- if shelvedfile(repo, basename, 'shelve').exists():
- node = shelvedfile(repo, basename, 'shelve').readinfo()['node']
- if node is None or node not in repo:
- with ui.configoverride({('ui', 'quiet'): True}):
- shelvectx = shelvedfile(repo, basename, 'hg').applybundle(tr)
- # We might not strip the unbundled changeset, so we should keep track of
- # the unshelve node in case we need to reuse it (eg: unshelve --keep)
- if node is None:
- info = {'node': nodemod.hex(shelvectx.node())}
- shelvedfile(repo, basename, 'shelve').writeinfo(info)
- else:
- shelvectx = repo[node]
-
- return repo, shelvectx
-
-def _rebaserestoredcommit(ui, repo, opts, tr, oldtiprev, basename, pctx,
- tmpwctx, shelvectx, branchtorestore,
- activebookmark):
- """Rebase restored commit from its original location to a destination"""
- # If the shelve is not immediately on top of the commit
- # we'll be merging with, rebase it to be on top.
- if tmpwctx.node() == shelvectx.p1().node():
- return shelvectx
-
- overrides = {
- ('ui', 'forcemerge'): opts.get('tool', ''),
- ('phases', 'new-commit'): phases.secret,
- }
- with repo.ui.configoverride(overrides, 'unshelve'):
- ui.status(_('rebasing shelved changes\n'))
- stats = merge.graft(repo, shelvectx, shelvectx.p1(),
- labels=['shelve', 'working-copy'],
- keepconflictparent=True)
- if stats.unresolvedcount:
- tr.close()
-
- nodestoremove = [repo.changelog.node(rev)
- for rev in pycompat.xrange(oldtiprev, len(repo))]
- shelvedstate.save(repo, basename, pctx, tmpwctx, nodestoremove,
- branchtorestore, opts.get('keep'), activebookmark)
- raise error.InterventionRequired(
- _("unresolved conflicts (see 'hg resolve', then "
- "'hg unshelve --continue')"))
-
- with repo.dirstate.parentchange():
- repo.setparents(tmpwctx.node(), nodemod.nullid)
- newnode = repo.commit(text=shelvectx.description(),
- extra=shelvectx.extra(),
- user=shelvectx.user(),
- date=shelvectx.date())
-
- if newnode is None:
- # If it ended up being a no-op commit, then the normal
- # merge state clean-up path doesn't happen, so do it
- # here. Fix issue5494
- merge.mergestate.clean(repo)
- shelvectx = tmpwctx
- msg = _('note: unshelved changes already existed '
- 'in the working copy\n')
- ui.status(msg)
- else:
- shelvectx = repo[newnode]
- hg.updaterepo(repo, tmpwctx.node(), False)
-
- return shelvectx
-
-def _forgetunknownfiles(repo, shelvectx, addedbefore):
- # Forget any files that were unknown before the shelve, unknown before
- # unshelve started, but are now added.
- shelveunknown = shelvectx.extra().get('shelve_unknown')
- if not shelveunknown:
- return
- shelveunknown = frozenset(shelveunknown.split('\0'))
- addedafter = frozenset(repo.status().added)
- toforget = (addedafter & shelveunknown) - addedbefore
- repo[None].forget(toforget)
-
-def _finishunshelve(repo, oldtiprev, tr, activebookmark):
- _restoreactivebookmark(repo, activebookmark)
- # The transaction aborting will strip all the commits for us,
- # but it doesn't update the inmemory structures, so addchangegroup
- # hooks still fire and try to operate on the missing commits.
- # Clean up manually to prevent this.
- repo.unfiltered().changelog.strip(oldtiprev, tr)
- _aborttransaction(repo, tr)
-
-def _checkunshelveuntrackedproblems(ui, repo, shelvectx):
- """Check potential problems which may result from working
- copy having untracked changes."""
- wcdeleted = set(repo.status().deleted)
- shelvetouched = set(shelvectx.files())
- intersection = wcdeleted.intersection(shelvetouched)
- if intersection:
- m = _("shelved change touches missing files")
- hint = _("run hg status to see which files are missing")
- raise error.Abort(m, hint=hint)
-
-@command('unshelve',
- [('a', 'abort', None,
- _('abort an incomplete unshelve operation')),
- ('c', 'continue', None,
- _('continue an incomplete unshelve operation')),
- ('k', 'keep', None,
- _('keep shelve after unshelving')),
- ('n', 'name', '',
- _('restore shelved change with given name'), _('NAME')),
- ('t', 'tool', '', _('specify merge tool')),
- ('', 'date', '',
- _('set date for temporary commits (DEPRECATED)'), _('DATE'))],
- _('hg unshelve [[-n] SHELVED]'),
- helpcategory=command.CATEGORY_WORKING_DIRECTORY)
-def unshelve(ui, repo, *shelved, **opts):
- """restore a shelved change to the working directory
-
- This command accepts an optional name of a shelved change to
- restore. If none is given, the most recent shelved change is used.
-
- If a shelved change is applied successfully, the bundle that
- contains the shelved changes is moved to a backup location
- (.hg/shelve-backup).
-
- Since you can restore a shelved change on top of an arbitrary
- commit, it is possible that unshelving will result in a conflict
- between your changes and the commits you are unshelving onto. If
- this occurs, you must resolve the conflict, then use
- ``--continue`` to complete the unshelve operation. (The bundle
- will not be moved until you successfully complete the unshelve.)
-
- (Alternatively, you can use ``--abort`` to abandon an unshelve
- that causes a conflict. This reverts the unshelved changes, and
- leaves the bundle in place.)
-
- If bare shelved change(when no files are specified, without interactive,
- include and exclude option) was done on newly created branch it would
- restore branch information to the working directory.
-
- After a successful unshelve, the shelved changes are stored in a
- backup directory. Only the N most recent backups are kept. N
- defaults to 10 but can be overridden using the ``shelve.maxbackups``
- configuration option.
-
- .. container:: verbose
-
- Timestamp in seconds is used to decide order of backups. More
- than ``maxbackups`` backups are kept, if same timestamp
- prevents from deciding exact order of them, for safety.
- """
- with repo.wlock():
- return _dounshelve(ui, repo, *shelved, **opts)
-
-def _dounshelve(ui, repo, *shelved, **opts):
- opts = pycompat.byteskwargs(opts)
- abortf = opts.get('abort')
- continuef = opts.get('continue')
- if not abortf and not continuef:
- cmdutil.checkunfinished(repo)
- shelved = list(shelved)
- if opts.get("name"):
- shelved.append(opts["name"])
-
- if abortf or continuef:
- if abortf and continuef:
- raise error.Abort(_('cannot use both abort and continue'))
- if shelved:
- raise error.Abort(_('cannot combine abort/continue with '
- 'naming a shelved change'))
- if abortf and opts.get('tool', False):
- ui.warn(_('tool option will be ignored\n'))
-
- try:
- state = shelvedstate.load(repo)
- if opts.get('keep') is None:
- opts['keep'] = state.keep
- except IOError as err:
- if err.errno != errno.ENOENT:
- raise
- cmdutil.wrongtooltocontinue(repo, _('unshelve'))
- except error.CorruptedState as err:
- ui.debug(pycompat.bytestr(err) + '\n')
- if continuef:
- msg = _('corrupted shelved state file')
- hint = _('please run hg unshelve --abort to abort unshelve '
- 'operation')
- raise error.Abort(msg, hint=hint)
- elif abortf:
- msg = _('could not read shelved state file, your working copy '
- 'may be in an unexpected state\nplease update to some '
- 'commit\n')
- ui.warn(msg)
- shelvedstate.clear(repo)
- return
-
- if abortf:
- return unshelveabort(ui, repo, state, opts)
- elif continuef:
- return unshelvecontinue(ui, repo, state, opts)
- elif len(shelved) > 1:
- raise error.Abort(_('can only unshelve one change at a time'))
-
- # abort unshelve while merging (issue5123)
- parents = repo[None].parents()
- if len(parents) > 1:
- raise error.Abort(_('cannot unshelve while merging'))
-
- elif not shelved:
- shelved = listshelves(repo)
- if not shelved:
- raise error.Abort(_('no shelved changes to apply!'))
- basename = util.split(shelved[0][1])[1]
- ui.status(_("unshelving change '%s'\n") % basename)
- else:
- basename = shelved[0]
-
- if not shelvedfile(repo, basename, patchextension).exists():
- raise error.Abort(_("shelved change '%s' not found") % basename)
-
- repo = repo.unfiltered()
- lock = tr = None
- try:
- lock = repo.lock()
- tr = repo.transaction('unshelve', report=lambda x: None)
- oldtiprev = len(repo)
-
- pctx = repo['.']
- tmpwctx = pctx
- # The goal is to have a commit structure like so:
- # ...-> pctx -> tmpwctx -> shelvectx
- # where tmpwctx is an optional commit with the user's pending changes
- # and shelvectx is the unshelved changes. Then we merge it all down
- # to the original pctx.
-
- activebookmark = _backupactivebookmark(repo)
- tmpwctx, addedbefore = _commitworkingcopychanges(ui, repo, opts,
- tmpwctx)
- repo, shelvectx = _unshelverestorecommit(ui, repo, tr, basename)
- _checkunshelveuntrackedproblems(ui, repo, shelvectx)
- branchtorestore = ''
- if shelvectx.branch() != shelvectx.p1().branch():
- branchtorestore = shelvectx.branch()
-
- shelvectx = _rebaserestoredcommit(ui, repo, opts, tr, oldtiprev,
- basename, pctx, tmpwctx,
- shelvectx, branchtorestore,
- activebookmark)
- overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
- with ui.configoverride(overrides, 'unshelve'):
- mergefiles(ui, repo, pctx, shelvectx)
- restorebranch(ui, repo, branchtorestore)
- _forgetunknownfiles(repo, shelvectx, addedbefore)
-
- shelvedstate.clear(repo)
- _finishunshelve(repo, oldtiprev, tr, activebookmark)
- unshelvecleanup(ui, repo, basename, opts)
- finally:
- if tr:
- tr.release()
- lockmod.release(lock)
-
-@command('shelve',
- [('A', 'addremove', None,
- _('mark new/missing files as added/removed before shelving')),
- ('u', 'unknown', None,
- _('store unknown files in the shelve')),
- ('', 'cleanup', None,
- _('delete all shelved changes')),
- ('', 'date', '',
- _('shelve with the specified commit date'), _('DATE')),
- ('d', 'delete', None,
- _('delete the named shelved change(s)')),
- ('e', 'edit', False,
- _('invoke editor on commit messages')),
- ('k', 'keep', False,
- _('shelve, but keep changes in the working directory')),
- ('l', 'list', None,
- _('list current shelves')),
- ('m', 'message', '',
- _('use text as shelve message'), _('TEXT')),
- ('n', 'name', '',
- _('use the given name for the shelved commit'), _('NAME')),
- ('p', 'patch', None,
- _('output patches for changes (provide the names of the shelved '
- 'changes as positional arguments)')),
- ('i', 'interactive', None,
- _('interactive mode, only works while creating a shelve')),
- ('', 'stat', None,
- _('output diffstat-style summary of changes (provide the names of '
- 'the shelved changes as positional arguments)')
- )] + cmdutil.walkopts,
- _('hg shelve [OPTION]... [FILE]...'),
- helpcategory=command.CATEGORY_WORKING_DIRECTORY)
-def shelvecmd(ui, repo, *pats, **opts):
- '''save and set aside changes from the working directory
-
- Shelving takes files that "hg status" reports as not clean, saves
- the modifications to a bundle (a shelved change), and reverts the
- files so that their state in the working directory becomes clean.
-
- To restore these changes to the working directory, using "hg
- unshelve"; this will work even if you switch to a different
- commit.
-
- When no files are specified, "hg shelve" saves all not-clean
- files. If specific files or directories are named, only changes to
- those files are shelved.
-
- In bare shelve (when no files are specified, without interactive,
- include and exclude option), shelving remembers information if the
- working directory was on newly created branch, in other words working
- directory was on different branch than its first parent. In this
- situation unshelving restores branch information to the working directory.
-
- Each shelved change has a name that makes it easier to find later.
- The name of a shelved change defaults to being based on the active
- bookmark, or if there is no active bookmark, the current named
- branch. To specify a different name, use ``--name``.
-
- To see a list of existing shelved changes, use the ``--list``
- option. For each shelved change, this will print its name, age,
- and description; use ``--patch`` or ``--stat`` for more details.
-
- To delete specific shelved changes, use ``--delete``. To delete
- all shelved changes, use ``--cleanup``.
- '''
- opts = pycompat.byteskwargs(opts)
- allowables = [
- ('addremove', {'create'}), # 'create' is pseudo action
- ('unknown', {'create'}),
- ('cleanup', {'cleanup'}),
-# ('date', {'create'}), # ignored for passing '--date "0 0"' in tests
- ('delete', {'delete'}),
- ('edit', {'create'}),
- ('keep', {'create'}),
- ('list', {'list'}),
- ('message', {'create'}),
- ('name', {'create'}),
- ('patch', {'patch', 'list'}),
- ('stat', {'stat', 'list'}),
- ]
- def checkopt(opt):
- if opts.get(opt):
- for i, allowable in allowables:
- if opts[i] and opt not in allowable:
- raise error.Abort(_("options '--%s' and '--%s' may not be "
- "used together") % (opt, i))
- return True
- if checkopt('cleanup'):
- if pats:
- raise error.Abort(_("cannot specify names when using '--cleanup'"))
- return cleanupcmd(ui, repo)
- elif checkopt('delete'):
- return deletecmd(ui, repo, pats)
- elif checkopt('list'):
- return listcmd(ui, repo, pats, opts)
- elif checkopt('patch') or checkopt('stat'):
- return patchcmds(ui, repo, pats, opts)
- else:
- return createcmd(ui, repo, pats, opts)
-
-def extsetup(ui):
- cmdutil.unfinishedstates.append(
- [shelvedstate._filename, False, False,
- _('unshelve already in progress'),
- _("use 'hg unshelve --continue' or 'hg unshelve --abort'")])
- cmdutil.afterresolvedstates.append(
- [shelvedstate._filename, _('hg unshelve --continue')])
--- a/hgext/show.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/show.py Mon Jul 22 14:00:33 2019 -0400
@@ -460,8 +460,8 @@
longest = max(map(len, showview._table.keys()))
entries = []
for key in sorted(showview._table.keys()):
- entries.append(pycompat.sysstr(' %s %s' % (
- key.ljust(longest), showview._table[key]._origdoc)))
+ entries.append(r' %s %s' % (
+ pycompat.sysstr(key.ljust(longest)), showview._table[key]._origdoc))
cmdtable['show'][0].__doc__ = pycompat.sysstr('%s\n\n%s\n ') % (
cmdtable['show'][0].__doc__.rstrip(),
--- a/hgext/sparse.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/sparse.py Mon Jul 22 14:00:33 2019 -0400
@@ -228,7 +228,7 @@
hint = _('include file with `hg debugsparse --include <pattern>` or use ' +
'`hg add -s <file>` to include file directory while adding')
for func in editfuncs:
- def _wrapper(orig, self, *args):
+ def _wrapper(orig, self, *args, **kwargs):
sparsematch = self._sparsematcher
if not sparsematch.always():
for f in args:
@@ -237,7 +237,7 @@
raise error.Abort(_("cannot add '%s' - it is outside "
"the sparse checkout") % f,
hint=hint)
- return orig(self, *args)
+ return orig(self, *args, **kwargs)
extensions.wrapfunction(dirstate.dirstate, func, _wrapper)
@command('debugsparse', [
--- a/hgext/strip.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/strip.py Mon Jul 22 14:00:33 2019 -0400
@@ -31,31 +31,13 @@
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'
-def checksubstate(repo, baserev=None):
- '''return list of subrepos at a different revision than substate.
- Abort if any subrepos have uncommitted changes.'''
- inclsubs = []
- wctx = repo[None]
- if baserev:
- bctx = repo[baserev]
- else:
- bctx = wctx.p1()
- for s in sorted(wctx.substate):
- wctx.sub(s).bailifchanged(True)
- if s not in bctx.substate or bctx.sub(s).dirty():
- inclsubs.append(s)
- return inclsubs
-
-def checklocalchanges(repo, force=False, excsuffix=''):
- cmdutil.checkunfinished(repo)
+def checklocalchanges(repo, force=False):
s = repo.status()
if not force:
- if s.modified or s.added or s.removed or s.deleted:
- _("local changes found") # i18n tool detection
- raise error.Abort(_("local changes found" + excsuffix))
- if checksubstate(repo):
- _("local changed subrepos found") # i18n tool detection
- raise error.Abort(_("local changed subrepos found" + excsuffix))
+ cmdutil.checkunfinished(repo)
+ cmdutil.bailifchanged(repo)
+ else:
+ cmdutil.checkunfinished(repo, skipmerge=True)
return s
def _findupdatetarget(repo, nodes):
--- a/hgext/transplant.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/transplant.py Mon Jul 22 14:00:33 2019 -0400
@@ -35,6 +35,7 @@
revset,
scmutil,
smartset,
+ state as statemod,
util,
vfs as vfsmod,
)
@@ -757,9 +758,12 @@
return n and nodemod.hex(n) or ''
def extsetup(ui):
- cmdutil.unfinishedstates.append(
- ['transplant/journal', True, False, _('transplant in progress'),
- _("use 'hg transplant --continue' or 'hg update' to abort")])
+ statemod.addunfinished (
+ 'transplant', fname='transplant/journal', clearable=True,
+ statushint=_('To continue: hg transplant --continue\n'
+ 'To abort: hg update'),
+ cmdhint=_("use 'hg transplant --continue' or 'hg update' to abort")
+ )
# tell hggettext to extract docstrings from these functions:
i18nfunctions = [revsettransplanted, kwtransplanted]
--- a/hgext/zeroconf/Zeroconf.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/zeroconf/Zeroconf.py Mon Jul 22 14:00:33 2019 -0400
@@ -89,6 +89,8 @@
import time
import traceback
+from mercurial import pycompat
+
__all__ = ["Zeroconf", "ServiceInfo", "ServiceBrowser"]
# hook for threads
@@ -270,6 +272,8 @@
"""A DNS question entry"""
def __init__(self, name, type, clazz):
+ if pycompat.ispy3 and isinstance(name, str):
+ name = name.encode('ascii')
if not name.endswith(".local."):
raise NonLocalNameException(name)
DNSEntry.__init__(self, name, type, clazz)
@@ -535,7 +539,7 @@
def readString(self, len):
"""Reads a string of a given length from the packet"""
- format = '!' + str(len) + 's'
+ format = '!%ds' % len
length = struct.calcsize(format)
info = struct.unpack(format,
self.data[self.offset:self.offset + length])
@@ -613,7 +617,7 @@
def readName(self):
"""Reads a domain name from the packet"""
- result = ''
+ result = r''
off = self.offset
next = -1
first = off
@@ -625,7 +629,7 @@
break
t = len & 0xC0
if t == 0x00:
- result = ''.join((result, self.readUTF(off, len) + '.'))
+ result = r''.join((result, self.readUTF(off, len) + r'.'))
off += len
elif t == 0xC0:
if next < 0:
--- a/hgext/zeroconf/__init__.py Tue Jul 09 10:07:35 2019 -0400
+++ b/hgext/zeroconf/__init__.py Mon Jul 22 14:00:33 2019 -0400
@@ -34,6 +34,7 @@
encoding,
extensions,
hg,
+ pycompat,
ui as uimod,
)
from mercurial.hgweb import (
@@ -55,7 +56,7 @@
# finds external-facing interface without sending any packets (Linux)
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
- s.connect(('1.0.0.1', 0))
+ s.connect((r'1.0.0.1', 0))
ip = s.getsockname()[0]
return ip
except socket.error:
@@ -64,17 +65,17 @@
# Generic method, sometimes gives useless results
try:
dumbip = socket.gethostbyaddr(socket.gethostname())[2][0]
- if ':' in dumbip:
- dumbip = '127.0.0.1'
- if not dumbip.startswith('127.'):
+ if r':' in dumbip:
+ dumbip = r'127.0.0.1'
+ if not dumbip.startswith(r'127.'):
return dumbip
except (socket.gaierror, socket.herror):
- dumbip = '127.0.0.1'
+ dumbip = r'127.0.0.1'
# works elsewhere, but actually sends a packet
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
- s.connect(('1.0.0.1', 1))
+ s.connect((r'1.0.0.1', 1))
ip = s.getsockname()[0]
return ip
except socket.error:
@@ -86,19 +87,19 @@
global server, localip
if not server:
ip = getip()
- if ip.startswith('127.'):
+ if ip.startswith(r'127.'):
# if we have no internet connection, this can happen.
return
localip = socket.inet_aton(ip)
server = Zeroconf.Zeroconf(ip)
- hostname = socket.gethostname().split('.')[0]
- host = hostname + ".local"
- name = "%s-%s" % (hostname, name)
+ hostname = socket.gethostname().split(r'.')[0]
+ host = hostname + r".local"
+ name = r"%s-%s" % (hostname, name)
# advertise to browsers
svc = Zeroconf.ServiceInfo('_http._tcp.local.',
- name + '._http._tcp.local.',
+ pycompat.bytestr(name + r'._http._tcp.local.'),
server = host,
port = port,
properties = {'description': desc,
@@ -108,7 +109,7 @@
# advertise to Mercurial clients
svc = Zeroconf.ServiceInfo('_hg._tcp.local.',
- name + '._hg._tcp.local.',
+ pycompat.bytestr(name + r'._hg._tcp.local.'),
server = host,
port = port,
properties = {'description': desc,
@@ -158,7 +159,7 @@
def getzcpaths():
ip = getip()
- if ip.startswith('127.'):
+ if ip.startswith(r'127.'):
return
server = Zeroconf.Zeroconf(ip)
l = listener()
@@ -166,10 +167,10 @@
time.sleep(1)
server.close()
for value in l.found.values():
- name = value.name[:value.name.index('.')]
- url = "http://%s:%s%s" % (socket.inet_ntoa(value.address), value.port,
- value.properties.get("path", "/"))
- yield "zc-" + name, url
+ name = value.name[:value.name.index(b'.')]
+ url = r"http://%s:%s%s" % (socket.inet_ntoa(value.address), value.port,
+ value.properties.get(r"path", r"/"))
+ yield b"zc-" + name, pycompat.bytestr(url)
def config(orig, self, section, key, *args, **kwargs):
if section == "paths" and key.startswith("zc-"):
--- a/mercurial/__init__.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/__init__.py Mon Jul 22 14:00:33 2019 -0400
@@ -29,7 +29,7 @@
"""A sys.meta_path finder that uses a custom module loader."""
def find_spec(self, fullname, path, target=None):
# Only handle Mercurial-related modules.
- if not fullname.startswith(('mercurial.', 'hgext.', 'hgext3rd.')):
+ if not fullname.startswith(('mercurial.', 'hgext.')):
return None
# don't try to parse binary
if fullname.startswith('mercurial.cext.'):
@@ -54,7 +54,16 @@
if finder == self:
continue
- spec = finder.find_spec(fullname, path, target=target)
+ # Originally the API was a `find_module` method, but it was
+ # renamed to `find_spec` in python 3.4, with a new `target`
+ # argument.
+ find_spec_method = getattr(finder, 'find_spec', None)
+ if find_spec_method:
+ spec = find_spec_method(fullname, path, target=target)
+ else:
+ spec = finder.find_module(fullname)
+ if spec is not None:
+ spec = importlib.util.spec_from_loader(fullname, spec)
if spec:
break
@@ -216,7 +225,9 @@
# It changes iteritems/values to items/values as they are not
# present in Python 3 world.
- elif fn in ('iteritems', 'itervalues'):
+ elif (fn in ('iteritems', 'itervalues') and
+ not (tokens[i - 1].type == token.NAME and
+ tokens[i - 1].string == 'def')):
yield t._replace(string=fn[4:])
continue
@@ -227,7 +238,7 @@
# ``replacetoken`` or any mechanism that changes semantics of module
# loading is changed. Otherwise cached bytecode may get loaded without
# the new transformation mechanisms applied.
- BYTECODEHEADER = b'HG\x00\x0b'
+ BYTECODEHEADER = b'HG\x00\x0c'
class hgloader(importlib.machinery.SourceFileLoader):
"""Custom module loader that transforms source code.
--- a/mercurial/bookmarks.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/bookmarks.py Mon Jul 22 14:00:33 2019 -0400
@@ -33,6 +33,14 @@
# custom styles
activebookmarklabel = 'bookmarks.active bookmarks.current'
+BOOKMARKS_IN_STORE_REQUIREMENT = 'bookmarksinstore'
+
+def bookmarksinstore(repo):
+ return BOOKMARKS_IN_STORE_REQUIREMENT in repo.requirements
+
+def bookmarksvfs(repo):
+ return repo.svfs if bookmarksinstore(repo) else repo.vfs
+
def _getbkfile(repo):
"""Hook so that extensions that mess with the store can hook bm storage.
@@ -40,7 +48,7 @@
bookmarks or the committed ones. Other extensions (like share)
may need to tweak this behavior further.
"""
- fp, pending = txnutil.trypending(repo.root, repo.vfs, 'bookmarks')
+ fp, pending = txnutil.trypending(repo.root, bookmarksvfs(repo), 'bookmarks')
return fp
class bmstore(object):
@@ -91,8 +99,11 @@
# ValueError:
# - node in nm, for non-20-bytes entry
# - split(...), for string without ' '
- repo.ui.warn(_('malformed line in .hg/bookmarks: %r\n')
- % pycompat.bytestr(line))
+ bookmarkspath = '.hg/bookmarks'
+ if bookmarksinstore(repo):
+ bookmarkspath = '.hg/store/bookmarks'
+ repo.ui.warn(_('malformed line in %s: %r\n')
+ % (bookmarkspath, pycompat.bytestr(line)))
except IOError as inst:
if inst.errno != errno.ENOENT:
raise
@@ -192,8 +203,9 @@
"""record that bookmarks have been changed in a transaction
The transaction is then responsible for updating the file content."""
+ location = '' if bookmarksinstore(self._repo) else 'plain'
tr.addfilegenerator('bookmarks', ('bookmarks',), self._write,
- location='plain')
+ location=location)
tr.hookargs['bookmark_moved'] = '1'
def _writerepo(self, repo):
@@ -203,28 +215,24 @@
rbm.active = None
rbm._writeactive()
- with repo.wlock():
- file_ = repo.vfs('bookmarks', 'w', atomictemp=True,
- checkambig=True)
- try:
- self._write(file_)
- except: # re-raises
- file_.discard()
- raise
- finally:
- file_.close()
+ if bookmarksinstore(repo):
+ vfs = repo.svfs
+ lock = repo.lock()
+ else:
+ vfs = repo.vfs
+ lock = repo.wlock()
+ with lock:
+ with vfs('bookmarks', 'w', atomictemp=True, checkambig=True) as f:
+ self._write(f)
def _writeactive(self):
if self._aclean:
return
with self._repo.wlock():
if self._active is not None:
- f = self._repo.vfs('bookmarks.current', 'w', atomictemp=True,
- checkambig=True)
- try:
+ with self._repo.vfs('bookmarks.current', 'w', atomictemp=True,
+ checkambig=True) as f:
f.write(encoding.fromlocal(self._active))
- finally:
- f.close()
else:
self._repo.vfs.tryunlink('bookmarks.current')
self._aclean = True
@@ -306,28 +314,12 @@
itself as we commit. This function returns the name of that bookmark.
It is stored in .hg/bookmarks.current
"""
- try:
- file = repo.vfs('bookmarks.current')
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
- return None
- try:
- # No readline() in osutil.posixfile, reading everything is
- # cheap.
- # Note that it's possible for readlines() here to raise
- # IOError, since we might be reading the active mark over
- # static-http which only tries to load the file when we try
- # to read from it.
- mark = encoding.tolocal((file.readlines() or [''])[0])
- if mark == '' or mark not in marks:
- mark = None
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
- return None
- finally:
- file.close()
+ # No readline() in osutil.posixfile, reading everything is
+ # cheap.
+ content = repo.vfs.tryread('bookmarks.current')
+ mark = encoding.tolocal((content.splitlines() or [''])[0])
+ if mark == '' or mark not in marks:
+ mark = None
return mark
def activate(repo, mark):
@@ -453,7 +445,11 @@
return d
def pushbookmark(repo, key, old, new):
- with repo.wlock(), repo.lock(), repo.transaction('bookmarks') as tr:
+ if bookmarksinstore(repo):
+ wlock = util.nullcontextmanager()
+ else:
+ wlock = repo.wlock()
+ with wlock, repo.lock(), repo.transaction('bookmarks') as tr:
marks = repo._bookmarks
existing = hex(marks.get(key, ''))
if existing != old and existing != new:
--- a/mercurial/branchmap.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/branchmap.py Mon Jul 22 14:00:33 2019 -0400
@@ -121,6 +121,12 @@
"""
raise ValueError(r'node %s does not exist' % pycompat.sysstr(hex(node)))
+def _branchcachedesc(repo):
+ if repo.filtername is not None:
+ return 'branch cache (%s)' % repo.filtername
+ else:
+ return 'branch cache'
+
class branchcache(object):
"""A dict like object that hold branches heads cache.
@@ -212,6 +218,8 @@
self._verifybranch(k)
yield k, v
+ items = iteritems
+
def hasbranch(self, label):
""" checks whether a branch of this name exists or not """
self._verifybranch(label)
@@ -241,11 +249,9 @@
except Exception as inst:
if repo.ui.debugflag:
- msg = 'invalid branchheads cache'
- if repo.filtername is not None:
- msg += ' (%s)' % repo.filtername
- msg += ': %s\n'
- repo.ui.debug(msg % pycompat.bytestr(inst))
+ msg = 'invalid %s: %s\n'
+ repo.ui.debug(msg % (_branchcachedesc(repo),
+ pycompat.bytestr(inst)))
bcache = None
finally:
@@ -351,9 +357,8 @@
state = 'o'
f.write("%s %s %s\n" % (hex(node), state, label))
f.close()
- repo.ui.log('branchcache',
- 'wrote %s branch cache with %d labels and %d nodes\n',
- repo.filtername, len(self._entries), nodecount)
+ repo.ui.log('branchcache', 'wrote %s with %d labels and %d nodes\n',
+ _branchcachedesc(repo), len(self._entries), nodecount)
except (IOError, OSError, error.Abort) as inst:
# Abort may be raised by read only opener, so log and continue
repo.ui.debug("couldn't write branch cache: %s\n" %
@@ -378,6 +383,10 @@
# fetch current topological heads to speed up filtering
topoheads = set(cl.headrevs())
+ # new tip revision which we found after iterating items from new
+ # branches
+ ntiprev = self.tiprev
+
# if older branchheads are reachable from new ones, they aren't
# really branchheads. Note checking parents is insufficient:
# 1 (branch a) -> 2 (branch b) -> 3 (branch a)
@@ -401,9 +410,12 @@
bheadrevs = sorted(bheadset)
self[branch] = [cl.node(rev) for rev in bheadrevs]
tiprev = bheadrevs[-1]
- if tiprev > self.tiprev:
- self.tipnode = cl.node(tiprev)
- self.tiprev = tiprev
+ if tiprev > ntiprev:
+ ntiprev = tiprev
+
+ if ntiprev > self.tiprev:
+ self.tiprev = ntiprev
+ self.tipnode = cl.node(ntiprev)
if not self.validfor(repo):
# cache key are not valid anymore
@@ -417,8 +429,8 @@
self.filteredhash = scmutil.filteredhash(repo, self.tiprev)
duration = util.timer() - starttime
- repo.ui.log('branchcache', 'updated %s branch cache in %.4f seconds\n',
- repo.filtername or b'None', duration)
+ repo.ui.log('branchcache', 'updated %s in %.4f seconds\n',
+ _branchcachedesc(repo), duration)
self.write(repo)
@@ -608,51 +620,59 @@
wlock = None
step = ''
try:
+ # write the new names
if self._rbcnamescount < len(self._names):
- step = ' names'
wlock = repo.wlock(wait=False)
- if self._rbcnamescount != 0:
- f = repo.cachevfs.open(_rbcnames, 'ab')
- if f.tell() == self._rbcsnameslen:
- f.write('\0')
- else:
- f.close()
- repo.ui.debug("%s changed - rewriting it\n" % _rbcnames)
- self._rbcnamescount = 0
- self._rbcrevslen = 0
- if self._rbcnamescount == 0:
- # before rewriting names, make sure references are removed
- repo.cachevfs.unlinkpath(_rbcrevs, ignoremissing=True)
- f = repo.cachevfs.open(_rbcnames, 'wb')
- f.write('\0'.join(encoding.fromlocal(b)
- for b in self._names[self._rbcnamescount:]))
- self._rbcsnameslen = f.tell()
- f.close()
- self._rbcnamescount = len(self._names)
+ step = ' names'
+ self._writenames(repo)
+ # write the new revs
start = self._rbcrevslen * _rbcrecsize
if start != len(self._rbcrevs):
step = ''
if wlock is None:
wlock = repo.wlock(wait=False)
- revs = min(len(repo.changelog),
- len(self._rbcrevs) // _rbcrecsize)
- f = repo.cachevfs.open(_rbcrevs, 'ab')
- if f.tell() != start:
- repo.ui.debug("truncating cache/%s to %d\n"
- % (_rbcrevs, start))
- f.seek(start)
- if f.tell() != start:
- start = 0
- f.seek(start)
- f.truncate()
- end = revs * _rbcrecsize
- f.write(self._rbcrevs[start:end])
- f.close()
- self._rbcrevslen = revs
+ self._writerevs(repo, start)
+
except (IOError, OSError, error.Abort, error.LockError) as inst:
repo.ui.debug("couldn't write revision branch cache%s: %s\n"
% (step, stringutil.forcebytestr(inst)))
finally:
if wlock is not None:
wlock.release()
+
+ def _writenames(self, repo):
+ """ write the new branch names to revbranchcache """
+ if self._rbcnamescount != 0:
+ f = repo.cachevfs.open(_rbcnames, 'ab')
+ if f.tell() == self._rbcsnameslen:
+ f.write('\0')
+ else:
+ f.close()
+ repo.ui.debug("%s changed - rewriting it\n" % _rbcnames)
+ self._rbcnamescount = 0
+ self._rbcrevslen = 0
+ if self._rbcnamescount == 0:
+ # before rewriting names, make sure references are removed
+ repo.cachevfs.unlinkpath(_rbcrevs, ignoremissing=True)
+ f = repo.cachevfs.open(_rbcnames, 'wb')
+ f.write('\0'.join(encoding.fromlocal(b)
+ for b in self._names[self._rbcnamescount:]))
+ self._rbcsnameslen = f.tell()
+ f.close()
+ self._rbcnamescount = len(self._names)
+
+ def _writerevs(self, repo, start):
+ """ write the new revs to revbranchcache """
+ revs = min(len(repo.changelog), len(self._rbcrevs) // _rbcrecsize)
+ with repo.cachevfs.open(_rbcrevs, 'ab') as f:
+ if f.tell() != start:
+ repo.ui.debug("truncating cache/%s to %d\n" % (_rbcrevs, start))
+ f.seek(start)
+ if f.tell() != start:
+ start = 0
+ f.seek(start)
+ f.truncate()
+ end = revs * _rbcrecsize
+ f.write(self._rbcrevs[start:end])
+ self._rbcrevslen = revs
--- a/mercurial/bundle2.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/bundle2.py Mon Jul 22 14:00:33 2019 -0400
@@ -2298,10 +2298,11 @@
streamclone.applybundlev2(repo, part, filecount, bytecount,
requirements)
-def widen_bundle(repo, oldmatcher, newmatcher, common, known, cgversion,
- ellipses):
+def widen_bundle(bundler, repo, oldmatcher, newmatcher, common,
+ known, cgversion, ellipses):
"""generates bundle2 for widening a narrow clone
+ bundler is the bundle to which data should be added
repo is the localrepository instance
oldmatcher matches what the client already has
newmatcher matches what the client needs (including what it already has)
@@ -2312,7 +2313,6 @@
returns bundle2 of the data required for extending
"""
- bundler = bundle20(repo.ui)
commonnodes = set()
cl = repo.changelog
for r in repo.revs("::%ln", common):
--- a/mercurial/cext/dirs.c Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/cext/dirs.c Mon Jul 22 14:00:33 2019 -0400
@@ -42,6 +42,9 @@
break;
pos -= 1;
}
+ if (pos == -1) {
+ return 0;
+ }
return pos;
}
--- a/mercurial/cext/parsers.c Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/cext/parsers.c Mon Jul 22 14:00:33 2019 -0400
@@ -667,10 +667,11 @@
void manifest_module_init(PyObject *mod);
void revlog_module_init(PyObject *mod);
-static const int version = 12;
+static const int version = 13;
static void module_init(PyObject *mod)
{
+ PyObject *capsule = NULL;
PyModule_AddIntConstant(mod, "version", version);
/* This module constant has two purposes. First, it lets us unit test
@@ -687,6 +688,12 @@
manifest_module_init(mod);
revlog_module_init(mod);
+ capsule = PyCapsule_New(
+ make_dirstate_tuple,
+ "mercurial.cext.parsers.make_dirstate_tuple_CAPI", NULL);
+ if (capsule != NULL)
+ PyModule_AddObject(mod, "make_dirstate_tuple_CAPI", capsule);
+
if (PyType_Ready(&dirstateTupleType) < 0) {
return;
}
--- a/mercurial/changegroup.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/changegroup.py Mon Jul 22 14:00:33 2019 -0400
@@ -1061,7 +1061,7 @@
while tmfnodes:
tree, nodes = tmfnodes.popitem()
- should_visit = self._matcher.visitdir(tree[:-1] or '.')
+ should_visit = self._matcher.visitdir(tree[:-1])
if tree and not should_visit:
continue
@@ -1093,7 +1093,7 @@
fullclnodes=self._fullclnodes,
precomputedellipsis=self._precomputedellipsis)
- if not self._oldmatcher.visitdir(store.tree[:-1] or '.'):
+ if not self._oldmatcher.visitdir(store.tree[:-1]):
yield tree, deltas
else:
# 'deltas' is a generator and we need to consume it even if
--- a/mercurial/changelog.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/changelog.py Mon Jul 22 14:00:33 2019 -0400
@@ -80,25 +80,55 @@
]
return "\0".join(items)
-def encodecopies(copies):
- items = [
- '%s\0%s' % (k, copies[k])
- for k in sorted(copies)
- ]
+def encodecopies(files, copies):
+ items = []
+ for i, dst in enumerate(files):
+ if dst in copies:
+ items.append('%d\0%s' % (i, copies[dst]))
+ if len(items) != len(copies):
+ raise error.ProgrammingError('some copy targets missing from file list')
return "\n".join(items)
-def decodecopies(data):
+def decodecopies(files, data):
try:
copies = {}
+ if not data:
+ return copies
for l in data.split('\n'):
- k, v = l.split('\0')
- copies[k] = v
+ strindex, src = l.split('\0')
+ i = int(strindex)
+ dst = files[i]
+ copies[dst] = src
return copies
- except ValueError:
+ except (ValueError, IndexError):
# Perhaps someone had chosen the same key name (e.g. "p1copies") and
# used different syntax for the value.
return None
+def encodefileindices(files, subset):
+ subset = set(subset)
+ indices = []
+ for i, f in enumerate(files):
+ if f in subset:
+ indices.append('%d' % i)
+ return '\n'.join(indices)
+
+def decodefileindices(files, data):
+ try:
+ subset = []
+ if not data:
+ return subset
+ for strindex in data.split('\n'):
+ i = int(strindex)
+ if i < 0 or i >= len(files):
+ return None
+ subset.append(files[i])
+ return subset
+ except (ValueError, IndexError):
+ # Perhaps someone had chosen the same key name (e.g. "added") and
+ # used different syntax for the value.
+ return None
+
def stripdesc(desc):
"""strip trailing whitespace and leading and trailing empty lines"""
return '\n'.join([l.rstrip() for l in desc.splitlines()]).strip('\n')
@@ -194,6 +224,10 @@
user = attr.ib(default='')
date = attr.ib(default=(0, 0))
files = attr.ib(default=attr.Factory(list))
+ filesadded = attr.ib(default=None)
+ filesremoved = attr.ib(default=None)
+ p1copies = attr.ib(default=None)
+ p2copies = attr.ib(default=None)
description = attr.ib(default='')
class changelogrevision(object):
@@ -298,14 +332,24 @@
return self._text[off[2] + 1:off[3]].split('\n')
@property
+ def filesadded(self):
+ rawindices = self.extra.get('filesadded')
+ return rawindices and decodefileindices(self.files, rawindices)
+
+ @property
+ def filesremoved(self):
+ rawindices = self.extra.get('filesremoved')
+ return rawindices and decodefileindices(self.files, rawindices)
+
+ @property
def p1copies(self):
rawcopies = self.extra.get('p1copies')
- return rawcopies and decodecopies(rawcopies)
+ return rawcopies and decodecopies(self.files, rawcopies)
@property
def p2copies(self):
rawcopies = self.extra.get('p2copies')
- return rawcopies and decodecopies(rawcopies)
+ return rawcopies and decodecopies(self.files, rawcopies)
@property
def description(self):
@@ -380,9 +424,6 @@
if i not in self.filteredrevs:
yield i
- def reachableroots(self, minroot, heads, roots, includepath=False):
- return self.index.reachableroots2(minroot, heads, roots, includepath)
-
def _checknofilteredinrevs(self, revs):
"""raise the appropriate error if 'revs' contains a filtered revision
@@ -562,7 +603,8 @@
return l[3:]
def add(self, manifest, files, desc, transaction, p1, p2,
- user, date=None, extra=None, p1copies=None, p2copies=None):
+ user, date=None, extra=None, p1copies=None, p2copies=None,
+ filesadded=None, filesremoved=None):
# Convert to UTF-8 encoded bytestrings as the very first
# thing: calling any method on a localstr object will turn it
# into a str object and the cached UTF-8 string is thus lost.
@@ -591,17 +633,23 @@
elif branch in (".", "null", "tip"):
raise error.StorageError(_('the name \'%s\' is reserved')
% branch)
- if (p1copies or p2copies) and extra is None:
+ extrasentries = p1copies, p2copies, filesadded, filesremoved
+ if extra is None and any(x is not None for x in extrasentries):
extra = {}
- if p1copies:
- extra['p1copies'] = encodecopies(p1copies)
- if p2copies:
- extra['p2copies'] = encodecopies(p2copies)
+ sortedfiles = sorted(files)
+ if p1copies is not None:
+ extra['p1copies'] = encodecopies(sortedfiles, p1copies)
+ if p2copies is not None:
+ extra['p2copies'] = encodecopies(sortedfiles, p2copies)
+ if filesadded is not None:
+ extra['filesadded'] = encodefileindices(sortedfiles, filesadded)
+ if filesremoved is not None:
+ extra['filesremoved'] = encodefileindices(sortedfiles, filesremoved)
if extra:
extra = encodeextra(extra)
parseddate = "%s %s" % (parseddate, extra)
- l = [hex(manifest), user, parseddate] + sorted(files) + ["", desc]
+ l = [hex(manifest), user, parseddate] + sortedfiles + ["", desc]
text = "\n".join(l)
return self.addrevision(text, transaction, len(self), p1, p2)
--- a/mercurial/chgserver.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/chgserver.py Mon Jul 22 14:00:33 2019 -0400
@@ -138,7 +138,9 @@
modules.append(__version__)
except ImportError:
pass
- files = [pycompat.sysexecutable]
+ files = []
+ if pycompat.sysexecutable:
+ files.append(pycompat.sysexecutable)
for m in modules:
try:
files.append(pycompat.fsencode(inspect.getabsfile(m)))
--- a/mercurial/cmdutil.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/cmdutil.py Mon Jul 22 14:00:33 2019 -0400
@@ -38,10 +38,12 @@
pathutil,
phases,
pycompat,
+ repair,
revlog,
rewriteutil,
scmutil,
smartset,
+ state as statemod,
subrepoutil,
templatekw,
templater,
@@ -264,8 +266,8 @@
In the end we'll record interesting changes, and everything else
will be left in place, so the user can continue working.
"""
-
- checkunfinished(repo, commit=True)
+ if not opts.get('interactive-unshelve'):
+ checkunfinished(repo, commit=True)
wctx = repo[None]
merge = len(wctx.parents()) > 1
if merge:
@@ -278,8 +280,8 @@
force = opts.get('force')
if not force:
vdirs = []
+ match = matchmod.badmatch(match, fail)
match.explicitdir = vdirs.append
- match.bad = fail
status = repo.status(match=match)
@@ -618,74 +620,18 @@
return _commentlines(msg)
-def _helpmessage(continuecmd, abortcmd):
- msg = _('To continue: %s\n'
- 'To abort: %s') % (continuecmd, abortcmd)
- return _commentlines(msg)
-
-def _rebasemsg():
- return _helpmessage('hg rebase --continue', 'hg rebase --abort')
-
-def _histeditmsg():
- return _helpmessage('hg histedit --continue', 'hg histedit --abort')
-
-def _unshelvemsg():
- return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')
-
-def _graftmsg():
- return _helpmessage('hg graft --continue', 'hg graft --abort')
-
-def _mergemsg():
- return _helpmessage('hg commit', 'hg merge --abort')
-
-def _bisectmsg():
- msg = _('To mark the changeset good: hg bisect --good\n'
- 'To mark the changeset bad: hg bisect --bad\n'
- 'To abort: hg bisect --reset\n')
- return _commentlines(msg)
-
-def fileexistspredicate(filename):
- return lambda repo: repo.vfs.exists(filename)
-
-def _mergepredicate(repo):
- return len(repo[None].parents()) > 1
-
-STATES = (
- # (state, predicate to detect states, helpful message function)
- ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
- ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
- ('graft', fileexistspredicate('graftstate'), _graftmsg),
- ('unshelve', fileexistspredicate('shelvedstate'), _unshelvemsg),
- ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
- # The merge state is part of a list that will be iterated over.
- # They need to be last because some of the other unfinished states may also
- # be in a merge or update state (eg. rebase, histedit, graft, etc).
- # We want those to have priority.
- ('merge', _mergepredicate, _mergemsg),
-)
-
-def _getrepostate(repo):
- # experimental config: commands.status.skipstates
- skip = set(repo.ui.configlist('commands', 'status.skipstates'))
- for state, statedetectionpredicate, msgfn in STATES:
- if state in skip:
- continue
- if statedetectionpredicate(repo):
- return (state, statedetectionpredicate, msgfn)
-
def morestatus(repo, fm):
- statetuple = _getrepostate(repo)
+ statetuple = statemod.getrepostate(repo)
label = 'status.morestatus'
if statetuple:
- state, statedetectionpredicate, helpfulmsg = statetuple
+ state, helpfulmsg = statetuple
statemsg = _('The repository is in an unfinished *%s* state.') % state
fm.plain('%s\n' % _commentlines(statemsg), label=label)
conmsg = _conflictsmsg(repo)
if conmsg:
fm.plain('%s\n' % conmsg, label=label)
if helpfulmsg:
- helpmsg = helpfulmsg()
- fm.plain('%s\n' % helpmsg, label=label)
+ fm.plain('%s\n' % _commentlines(helpfulmsg), label=label)
def findpossible(cmd, table, strict=False):
"""
@@ -1668,6 +1614,14 @@
_exportsingle(repo, ctx, fm, match, switch_parent, seqno,
diffopts)
+def _prefetchchangedfiles(repo, revs, match):
+ allfiles = set()
+ for rev in revs:
+ for file in repo[rev].files():
+ if not match or match(file):
+ allfiles.add(file)
+ scmutil.prefetchfiles(repo, revs, scmutil.matchfiles(repo, allfiles))
+
def export(repo, revs, basefm, fntemplate='hg-%h.patch', switch_parent=False,
opts=None, match=None):
'''export changesets as hg patches
@@ -1692,7 +1646,7 @@
the given template.
Otherwise: All revs will be written to basefm.
'''
- scmutil.prefetchfiles(repo, revs, match)
+ _prefetchchangedfiles(repo, revs, match)
if not fntemplate:
_exportfile(repo, revs, basefm, '<unnamed>', switch_parent, opts, match)
@@ -1702,7 +1656,7 @@
def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
"""Export changesets to the given file stream"""
- scmutil.prefetchfiles(repo, revs, match)
+ _prefetchchangedfiles(repo, revs, match)
dest = getattr(fp, 'name', '<unnamed>')
with formatter.formatter(repo.ui, fp, 'export', {}) as fm:
@@ -2345,14 +2299,22 @@
return ret
+def _catfmtneedsdata(fm):
+ return not fm.datahint() or 'data' in fm.datahint()
+
def _updatecatformatter(fm, ctx, matcher, path, decode):
"""Hook for adding data to the formatter used by ``hg cat``.
Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
this method first."""
- data = ctx[path].data()
- if decode:
- data = ctx.repo().wwritedata(path, data)
+
+ # data() can be expensive to fetch (e.g. lfs), so don't fetch it if it
+ # wasn't requested.
+ data = b''
+ if _catfmtneedsdata(fm):
+ data = ctx[path].data()
+ if decode:
+ data = ctx.repo().wwritedata(path, data)
fm.startitem()
fm.context(ctx=ctx)
fm.write('data', '%s', data)
@@ -2383,13 +2345,15 @@
mfnode = ctx.manifestnode()
try:
if mfnode and mfl[mfnode].find(file)[0]:
- scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
+ if _catfmtneedsdata(basefm):
+ scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
write(file)
return 0
except KeyError:
pass
- scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
+ if _catfmtneedsdata(basefm):
+ scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
for abs in ctx.walk(matcher):
write(abs)
@@ -2583,12 +2547,18 @@
message = logmessage(ui, opts)
editform = mergeeditform(old, 'commit.amend')
- editor = getcommiteditor(editform=editform,
- **pycompat.strkwargs(opts))
if not message:
- editor = getcommiteditor(edit=True, editform=editform)
message = old.description()
+ # Default if message isn't provided and --edit is not passed is to
+ # invoke editor, but allow --no-edit. If somehow we don't have any
+ # description, let's always start the editor.
+ doedit = not message or opts.get('edit') in [True, None]
+ else:
+ # Default if message is provided is to not invoke editor, but allow
+ # --edit.
+ doedit = opts.get('edit') is True
+ editor = getcommiteditor(edit=doedit, editform=editform)
pureextra = extra.copy()
extra['amend_source'] = old.hex()
@@ -3289,66 +3259,69 @@
# - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()
-# A list of state files kept by multistep operations like graft.
-# Since graft cannot be aborted, it is considered 'clearable' by update.
-# note: bisect is intentionally excluded
-# (state file, clearable, allowcommit, error, hint)
-unfinishedstates = [
- ('graftstate', True, False, _('graft in progress'),
- _("use 'hg graft --continue' or 'hg graft --stop' to stop")),
- ('updatestate', True, False, _('last update was interrupted'),
- _("use 'hg update' to get a consistent checkout"))
- ]
-
-def checkunfinished(repo, commit=False):
+
+def checkunfinished(repo, commit=False, skipmerge=False):
'''Look for an unfinished multistep operation, like graft, and abort
if found. It's probably good to check this right before
bailifchanged().
'''
# Check for non-clearable states first, so things like rebase will take
# precedence over update.
- for f, clearable, allowcommit, msg, hint in unfinishedstates:
- if clearable or (commit and allowcommit):
+ for state in statemod._unfinishedstates:
+ if (state._clearable or (commit and state._allowcommit) or
+ state._reportonly):
continue
- if repo.vfs.exists(f):
- raise error.Abort(msg, hint=hint)
-
- for f, clearable, allowcommit, msg, hint in unfinishedstates:
- if not clearable or (commit and allowcommit):
+ if state.isunfinished(repo):
+ raise error.Abort(state.msg(), hint=state.hint())
+
+ for s in statemod._unfinishedstates:
+ if (not s._clearable or (commit and s._allowcommit) or
+ (s._opname == 'merge' and skipmerge) or s._reportonly):
continue
- if repo.vfs.exists(f):
- raise error.Abort(msg, hint=hint)
+ if s.isunfinished(repo):
+ raise error.Abort(s.msg(), hint=s.hint())
def clearunfinished(repo):
'''Check for unfinished operations (as above), and clear the ones
that are clearable.
'''
- for f, clearable, allowcommit, msg, hint in unfinishedstates:
- if not clearable and repo.vfs.exists(f):
- raise error.Abort(msg, hint=hint)
- for f, clearable, allowcommit, msg, hint in unfinishedstates:
- if clearable and repo.vfs.exists(f):
- util.unlink(repo.vfs.join(f))
-
-afterresolvedstates = [
- ('graftstate',
- _('hg graft --continue')),
- ]
+ for state in statemod._unfinishedstates:
+ if state._reportonly:
+ continue
+ if not state._clearable and state.isunfinished(repo):
+ raise error.Abort(state.msg(), hint=state.hint())
+
+ for s in statemod._unfinishedstates:
+ if s._opname == 'merge' or state._reportonly:
+ continue
+ if s._clearable and s.isunfinished(repo):
+ util.unlink(repo.vfs.join(s._fname))
+
+def getunfinishedstate(repo):
+ ''' Checks for unfinished operations and returns statecheck object
+ for it'''
+ for state in statemod._unfinishedstates:
+ if state.isunfinished(repo):
+ return state
+ return None
def howtocontinue(repo):
'''Check for an unfinished operation and return the command to finish
it.
- afterresolvedstates tuples define a .hg/{file} and the corresponding
- command needed to finish it.
+ statemod._unfinishedstates list is checked for an unfinished operation
+ and the corresponding message to finish it is generated if a method to
+ continue is supported by the operation.
Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
a boolean.
'''
contmsg = _("continue: %s")
- for f, msg in afterresolvedstates:
- if repo.vfs.exists(f):
- return contmsg % msg, True
+ for state in statemod._unfinishedstates:
+ if not state._continueflag:
+ continue
+ if state.isunfinished(repo):
+ return contmsg % state.continuemsg(), True
if repo[None].dirty(missing=True, merge=False, branch=False):
return contmsg % _("hg commit"), False
return None, None
@@ -3356,8 +3329,8 @@
def checkafterresolved(repo):
'''Inform the user about the next action after completing hg resolve
- If there's a matching afterresolvedstates, howtocontinue will yield
- repo.ui.warn as the reporter.
+ If there's an unfinished operation that supports continue flag,
+ howtocontinue will yield repo.ui.warn as the reporter.
Otherwise, it will yield repo.ui.note.
'''
@@ -3382,3 +3355,73 @@
if after[1]:
hint = after[0]
raise error.Abort(_('no %s in progress') % task, hint=hint)
+
+def abortgraft(ui, repo, graftstate):
+ """abort the interrupted graft and roll back to the state before the
+ interrupted graft"""
+ if not graftstate.exists():
+ raise error.Abort(_("no interrupted graft to abort"))
+ statedata = readgraftstate(repo, graftstate)
+ newnodes = statedata.get('newnodes')
+ if newnodes is None:
+ # an old graft state which does not have all the data required to abort
+ # the graft
+ raise error.Abort(_("cannot abort using an old graftstate"))
+
+ # changeset from which graft operation was started
+ if len(newnodes) > 0:
+ startctx = repo[newnodes[0]].p1()
+ else:
+ startctx = repo['.']
+ # whether to strip or not
+ cleanup = False
+ from . import hg
+ if newnodes:
+ newnodes = [repo[r].rev() for r in newnodes]
+ cleanup = True
+ # checking that none of the newnodes turned public or is public
+ immutable = [c for c in newnodes if not repo[c].mutable()]
+ if immutable:
+ repo.ui.warn(_("cannot clean up public changesets %s\n")
+ % ', '.join(bytes(repo[r]) for r in immutable),
+ hint=_("see 'hg help phases' for details"))
+ cleanup = False
+
+ # checking that no new nodes are created on top of grafted revs
+ desc = set(repo.changelog.descendants(newnodes))
+ if desc - set(newnodes):
+ repo.ui.warn(_("new changesets detected on destination "
+ "branch, can't strip\n"))
+ cleanup = False
+
+ if cleanup:
+ with repo.wlock(), repo.lock():
+ hg.updaterepo(repo, startctx.node(), overwrite=True)
+ # stripping the new nodes created
+ strippoints = [c.node() for c in repo.set("roots(%ld)",
+ newnodes)]
+ repair.strip(repo.ui, repo, strippoints, backup=False)
+
+ if not cleanup:
+ # we don't update to the startnode if we can't strip
+ startctx = repo['.']
+ hg.updaterepo(repo, startctx.node(), overwrite=True)
+
+ ui.status(_("graft aborted\n"))
+ ui.status(_("working directory is now at %s\n") % startctx.hex()[:12])
+ graftstate.delete()
+ return 0
+
+def readgraftstate(repo, graftstate):
+ """read the graft state file and return a dict of the data stored in it"""
+ try:
+ return graftstate.read()
+ except error.CorruptedState:
+ nodes = repo.vfs.read('graftstate').splitlines()
+ return {'nodes': nodes}
+
+def hgabortgraft(ui, repo):
+ """ abort logic for aborting graft using 'hg abort'"""
+ with repo.wlock():
+ graftstate = statemod.cmdstate(repo, 'graftstate')
+ return abortgraft(ui, repo, graftstate)
--- a/mercurial/commands.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/commands.py Mon Jul 22 14:00:33 2019 -0400
@@ -53,16 +53,17 @@
pycompat,
rcutil,
registrar,
- repair,
revsetlang,
rewriteutil,
scmutil,
server,
+ shelve as shelvemod,
state as statemod,
streamclone,
tags as tagsmod,
ui as uimod,
util,
+ verify as verifymod,
wireprotoserver,
)
from .utils import (
@@ -130,6 +131,29 @@
# Commands start here, listed alphabetically
+@command('abort',
+ dryrunopts, helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
+ helpbasic=True)
+def abort(ui, repo, **opts):
+ """abort an unfinished operation (EXPERIMENTAL)
+
+ Aborts a multistep operation like graft, histedit, rebase, merge,
+ and unshelve if they are in an unfinished state.
+
+ use --dry-run/-n to dry run the command.
+ """
+ dryrun = opts.get(r'dry_run')
+ abortstate = cmdutil.getunfinishedstate(repo)
+ if not abortstate:
+ raise error.Abort(_('no operation in progress'))
+ if not abortstate.abortfunc:
+ raise error.Abort((_("%s in progress but does not support 'hg abort'") %
+ (abortstate._opname)), hint=abortstate.hint())
+ if dryrun:
+ ui.status(_('%s in progress, will be aborted\n') % (abortstate._opname))
+ return
+ return abortstate.abortfunc(ui, repo)
+
@command('add',
walkopts + subrepoopts + dryrunopts,
_('[OPTION]... [FILE]...'),
@@ -1582,6 +1606,8 @@
('', 'amend', None, _('amend the parent of the working directory')),
('s', 'secret', None, _('use the secret phase for committing')),
('e', 'edit', None, _('invoke editor on commit messages')),
+ ('', 'force-close-branch', None,
+ _('forcibly close branch from a non-head changeset (ADVANCED)')),
('i', 'interactive', None, _('use interactive mode')),
] + walkopts + commitopts + commitopts2 + subrepoopts,
_('[OPTION]... [FILE]...'),
@@ -1669,11 +1695,19 @@
bheads = repo.branchheads(branch)
extra = {}
- if opts.get('close_branch'):
+ if opts.get('close_branch') or opts.get('force_close_branch'):
extra['close'] = '1'
- if not bheads:
- raise error.Abort(_('can only close branch heads'))
+ if repo['.'].closesbranch():
+ raise error.Abort(_('current revision is already a branch closing'
+ ' head'))
+ elif not bheads:
+ raise error.Abort(_('branch "%s" has no heads to close') % branch)
+ elif (branch == repo['.'].branch() and repo['.'].node() not in bheads
+ and not opts.get('force_close_branch')):
+ hint = _('use --force-close-branch to close branch from a non-head'
+ ' changeset')
+ raise error.Abort(_('can only close branch heads'), hint=hint)
elif opts.get('amend'):
if (repo['.'].p1().branch() != branch and
repo['.'].p2().branch() != branch):
@@ -1732,6 +1766,10 @@
cmdutil.commitstatus(repo, node, branch, bheads, opts)
+ if not ui.quiet and ui.configbool('commands', 'commit.post-status'):
+ status(ui, repo, modified=True, added=True, removed=True, deleted=True,
+ unknown=True, subrepos=opts.get('subrepos'))
+
@command('config|showconfig|debugconfig',
[('u', 'untrusted', None, _('show untrusted configuration options')),
('e', 'edit', None, _('edit user config')),
@@ -1853,6 +1891,30 @@
return 0
return 1
+@command('continue',
+ dryrunopts, helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
+ helpbasic=True)
+def continuecmd(ui, repo, **opts):
+ """resumes an interrupted operation (EXPERIMENTAL)
+
+ Finishes a multistep operation like graft, histedit, rebase, merge,
+ and unshelve if they are in an interrupted state.
+
+ use --dry-run/-n to dry run the command.
+ """
+ dryrun = opts.get(r'dry_run')
+ contstate = cmdutil.getunfinishedstate(repo)
+ if not contstate:
+ raise error.Abort(_('no operation in progress'))
+ if not contstate.continuefunc:
+ raise error.Abort((_("%s in progress but does not support "
+ "'hg continue'") % (contstate._opname)),
+ hint=contstate.continuemsg())
+ if dryrun:
+ ui.status(_('%s in progress, will be resumed\n') % (contstate._opname))
+ return
+ return contstate.continuefunc(ui, repo)
+
@command('copy|cp',
[('A', 'after', None, _('record a copy that has already occurred')),
('f', 'force', None, _('forcibly copy over an existing managed file')),
@@ -2449,14 +2511,14 @@
opts.get('currentuser'), opts.get('rev'))):
raise error.Abort(_("cannot specify any other flag with '--abort'"))
- return _abortgraft(ui, repo, graftstate)
+ return cmdutil.abortgraft(ui, repo, graftstate)
elif opts.get('continue'):
cont = True
if revs:
raise error.Abort(_("can't specify --continue and revisions"))
# read in unfinished revisions
if graftstate.exists():
- statedata = _readgraftstate(repo, graftstate)
+ statedata = cmdutil.readgraftstate(repo, graftstate)
if statedata.get('date'):
opts['date'] = statedata['date']
if statedata.get('user'):
@@ -2626,69 +2688,6 @@
return 0
-def _abortgraft(ui, repo, graftstate):
- """abort the interrupted graft and rollbacks to the state before interrupted
- graft"""
- if not graftstate.exists():
- raise error.Abort(_("no interrupted graft to abort"))
- statedata = _readgraftstate(repo, graftstate)
- newnodes = statedata.get('newnodes')
- if newnodes is None:
- # and old graft state which does not have all the data required to abort
- # the graft
- raise error.Abort(_("cannot abort using an old graftstate"))
-
- # changeset from which graft operation was started
- if len(newnodes) > 0:
- startctx = repo[newnodes[0]].p1()
- else:
- startctx = repo['.']
- # whether to strip or not
- cleanup = False
- if newnodes:
- newnodes = [repo[r].rev() for r in newnodes]
- cleanup = True
- # checking that none of the newnodes turned public or is public
- immutable = [c for c in newnodes if not repo[c].mutable()]
- if immutable:
- repo.ui.warn(_("cannot clean up public changesets %s\n")
- % ', '.join(bytes(repo[r]) for r in immutable),
- hint=_("see 'hg help phases' for details"))
- cleanup = False
-
- # checking that no new nodes are created on top of grafted revs
- desc = set(repo.changelog.descendants(newnodes))
- if desc - set(newnodes):
- repo.ui.warn(_("new changesets detected on destination "
- "branch, can't strip\n"))
- cleanup = False
-
- if cleanup:
- with repo.wlock(), repo.lock():
- hg.updaterepo(repo, startctx.node(), overwrite=True)
- # stripping the new nodes created
- strippoints = [c.node() for c in repo.set("roots(%ld)",
- newnodes)]
- repair.strip(repo.ui, repo, strippoints, backup=False)
-
- if not cleanup:
- # we don't update to the startnode if we can't strip
- startctx = repo['.']
- hg.updaterepo(repo, startctx.node(), overwrite=True)
-
- ui.status(_("graft aborted\n"))
- ui.status(_("working directory is now at %s\n") % startctx.hex()[:12])
- graftstate.delete()
- return 0
-
-def _readgraftstate(repo, graftstate):
- """read the graft state file and return a dict of the data stored in it"""
- try:
- return graftstate.read()
- except error.CorruptedState:
- nodes = repo.vfs.read('graftstate').splitlines()
- return {'nodes': nodes}
-
def _stopgraft(ui, repo, graftstate):
"""stop the interrupted graft"""
if not graftstate.exists():
@@ -2700,6 +2699,12 @@
ui.status(_("working directory is now at %s\n") % pctx.hex()[:12])
return 0
+statemod.addunfinished(
+ 'graft', fname='graftstate', clearable=True, stopflag=True,
+ continueflag=True, abortfunc=cmdutil.hgabortgraft,
+ cmdhint=_("use 'hg graft --continue' or 'hg graft --stop' to stop")
+)
+
@command('grep',
[('0', 'print0', None, _('end fields with NUL')),
('', 'all', None, _('print all revisions that match (DEPRECATED) ')),
@@ -3715,7 +3720,8 @@
_('follow line range of specified file (EXPERIMENTAL)'),
_('FILE,RANGE')),
('', 'removed', None, _('include revisions where files were removed')),
- ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
+ ('m', 'only-merges', None,
+ _('show only merges (DEPRECATED) (use -r "merge()" instead)')),
('u', 'user', [], _('revisions committed by user'), _('USER')),
('', 'only-branch', [],
_('show only changesets within the given named branch (DEPRECATED)'),
@@ -3876,12 +3882,12 @@
# then filter the result by logcmdutil._makerevset() and --limit
revs, differ = logcmdutil.getlinerangerevs(repo, revs, opts)
- getrenamed = None
+ getcopies = None
if opts.get('copies'):
endrev = None
if revs:
endrev = revs.max() + 1
- getrenamed = scmutil.getrenamedfn(repo, endrev=endrev)
+ getcopies = scmutil.getcopiesfn(repo, endrev=endrev)
ui.pager('log')
displayer = logcmdutil.changesetdisplayer(ui, repo, opts, differ,
@@ -3890,7 +3896,7 @@
displayfn = logcmdutil.displaygraphrevs
else:
displayfn = logcmdutil.displayrevs
- displayfn(ui, repo, revs, displayer, getrenamed)
+ displayfn(ui, repo, revs, displayer, getcopies)
@command('manifest',
[('r', 'rev', '', _('revision to display'), _('REV')),
@@ -3983,7 +3989,7 @@
If no revision is specified, the working directory's parent is a
head revision, and the current branch contains exactly one other
head, the other head is merged with by default. Otherwise, an
- explicit revision with which to merge with must be provided.
+ explicit revision with which to merge must be provided.
See :hg:`help resolve` for information on handling file conflicts.
@@ -3999,6 +4005,10 @@
if abort and repo.dirstate.p2() == nullid:
cmdutil.wrongtooltocontinue(repo, _('merge'))
if abort:
+ state = cmdutil.getunfinishedstate(repo)
+ if state and state._opname != 'merge':
+ raise error.Abort(_('cannot abort merge with %s in progress') %
+ (state._opname), hint=state.hint())
if node:
raise error.Abort(_("cannot specify a node with --abort"))
if opts.get('rev'):
@@ -4036,6 +4046,14 @@
return hg.merge(repo, node, force=force, mergeforce=force,
labels=labels, abort=abort)
+statemod.addunfinished(
+ 'merge', fname=None, clearable=True, allowcommit=True,
+ cmdmsg=_('outstanding uncommitted merge'), abortfunc=hg.abortmerge,
+ statushint=_('To continue: hg commit\n'
+ 'To abort: hg merge --abort'),
+ cmdhint=_("use 'hg commit' or 'hg merge --abort'")
+)
+
@command('outgoing|out',
[('f', 'force', None, _('run even when the destination is unrelated')),
('r', 'rev', [],
@@ -4672,7 +4690,7 @@
"""
ret = repo.recover()
if ret:
- if opts['verify']:
+ if opts[r'verify']:
return hg.verify(repo)
else:
msg = _("(verify step skipped, run `hg verify` to check your "
@@ -5217,16 +5235,30 @@
force=opts.get(r'force'))
@command(
- 'root', [], intents={INTENT_READONLY},
+ 'root', [] + formatteropts, intents={INTENT_READONLY},
helpcategory=command.CATEGORY_WORKING_DIRECTORY)
-def root(ui, repo):
+def root(ui, repo, **opts):
"""print the root (top) of the current working directory
Print the root directory of the current repository.
+ .. container:: verbose
+
+ Template:
+
+ The following keywords are supported in addition to the common template
+ keywords and functions. See also :hg:`help templates`.
+
+ :hgpath: String. Path to the .hg directory.
+ :storepath: String. Path to the directory holding versioned data.
+
Returns 0 on success.
"""
- ui.write(repo.root + "\n")
+ opts = pycompat.byteskwargs(opts)
+ with ui.formatter('root', opts) as fm:
+ fm.startitem()
+ fm.write('reporoot', '%s\n', repo.root)
+ fm.data(hgpath=repo.path, storepath=repo.spath)
@command('serve',
[('A', 'accesslog', '', _('name of access log file to write to'),
@@ -5299,6 +5331,106 @@
service = server.createservice(ui, repo, opts)
return server.runservice(opts, initfn=service.init, runfn=service.run)
+@command('shelve',
+ [('A', 'addremove', None,
+ _('mark new/missing files as added/removed before shelving')),
+ ('u', 'unknown', None,
+ _('store unknown files in the shelve')),
+ ('', 'cleanup', None,
+ _('delete all shelved changes')),
+ ('', 'date', '',
+ _('shelve with the specified commit date'), _('DATE')),
+ ('d', 'delete', None,
+ _('delete the named shelved change(s)')),
+ ('e', 'edit', False,
+ _('invoke editor on commit messages')),
+ ('k', 'keep', False,
+ _('shelve, but keep changes in the working directory')),
+ ('l', 'list', None,
+ _('list current shelves')),
+ ('m', 'message', '',
+ _('use text as shelve message'), _('TEXT')),
+ ('n', 'name', '',
+ _('use the given name for the shelved commit'), _('NAME')),
+ ('p', 'patch', None,
+ _('output patches for changes (provide the names of the shelved '
+ 'changes as positional arguments)')),
+ ('i', 'interactive', None,
+ _('interactive mode')),
+ ('', 'stat', None,
+ _('output diffstat-style summary of changes (provide the names of '
+ 'the shelved changes as positional arguments)')
+ )] + cmdutil.walkopts,
+ _('hg shelve [OPTION]... [FILE]...'),
+ helpcategory=command.CATEGORY_WORKING_DIRECTORY)
+def shelve(ui, repo, *pats, **opts):
+ '''save and set aside changes from the working directory
+
+ Shelving takes files that "hg status" reports as not clean, saves
+ the modifications to a bundle (a shelved change), and reverts the
+ files so that their state in the working directory becomes clean.
+
+ To restore these changes to the working directory, use "hg
+ unshelve"; this will work even if you switch to a different
+ commit.
+
+ When no files are specified, "hg shelve" saves all not-clean
+ files. If specific files or directories are named, only changes to
+ those files are shelved.
+
+ In bare shelve (when no files are specified, without interactive,
+ include and exclude option), shelving remembers information if the
+ working directory was on newly created branch, in other words working
+ directory was on different branch than its first parent. In this
+ situation unshelving restores branch information to the working directory.
+
+ Each shelved change has a name that makes it easier to find later.
+ The name of a shelved change defaults to being based on the active
+ bookmark, or if there is no active bookmark, the current named
+ branch. To specify a different name, use ``--name``.
+
+ To see a list of existing shelved changes, use the ``--list``
+ option. For each shelved change, this will print its name, age,
+ and description; use ``--patch`` or ``--stat`` for more details.
+
+ To delete specific shelved changes, use ``--delete``. To delete
+ all shelved changes, use ``--cleanup``.
+ '''
+ opts = pycompat.byteskwargs(opts)
+ allowables = [
+ ('addremove', {'create'}), # 'create' is pseudo action
+ ('unknown', {'create'}),
+ ('cleanup', {'cleanup'}),
+# ('date', {'create'}), # ignored for passing '--date "0 0"' in tests
+ ('delete', {'delete'}),
+ ('edit', {'create'}),
+ ('keep', {'create'}),
+ ('list', {'list'}),
+ ('message', {'create'}),
+ ('name', {'create'}),
+ ('patch', {'patch', 'list'}),
+ ('stat', {'stat', 'list'}),
+ ]
+ def checkopt(opt):
+ if opts.get(opt):
+ for i, allowable in allowables:
+ if opts[i] and opt not in allowable:
+ raise error.Abort(_("options '--%s' and '--%s' may not be "
+ "used together") % (opt, i))
+ return True
+ if checkopt('cleanup'):
+ if pats:
+ raise error.Abort(_("cannot specify names when using '--cleanup'"))
+ return shelvemod.cleanupcmd(ui, repo)
+ elif checkopt('delete'):
+ return shelvemod.deletecmd(ui, repo, pats)
+ elif checkopt('list'):
+ return shelvemod.listcmd(ui, repo, pats, opts)
+ elif checkopt('patch') or checkopt('stat'):
+ return shelvemod.patchcmds(ui, repo, pats, opts)
+ else:
+ return shelvemod.createcmd(ui, repo, pats, opts)
+
_NOTTERSE = 'nothing'
@command('status|st',
@@ -6027,6 +6159,68 @@
return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
+@command('unshelve',
+ [('a', 'abort', None,
+ _('abort an incomplete unshelve operation')),
+ ('c', 'continue', None,
+ _('continue an incomplete unshelve operation')),
+ ('i', 'interactive', None,
+ _('use interactive mode (EXPERIMENTAL)')),
+ ('k', 'keep', None,
+ _('keep shelve after unshelving')),
+ ('n', 'name', '',
+ _('restore shelved change with given name'), _('NAME')),
+ ('t', 'tool', '', _('specify merge tool')),
+ ('', 'date', '',
+ _('set date for temporary commits (DEPRECATED)'), _('DATE'))],
+ _('hg unshelve [OPTION]... [FILE]... [-n SHELVED]'),
+ helpcategory=command.CATEGORY_WORKING_DIRECTORY)
+def unshelve(ui, repo, *shelved, **opts):
+ """restore a shelved change to the working directory
+
+ This command accepts an optional name of a shelved change to
+ restore. If none is given, the most recent shelved change is used.
+
+ If a shelved change is applied successfully, the bundle that
+ contains the shelved changes is moved to a backup location
+ (.hg/shelve-backup).
+
+ Since you can restore a shelved change on top of an arbitrary
+ commit, it is possible that unshelving will result in a conflict
+ between your changes and the commits you are unshelving onto. If
+ this occurs, you must resolve the conflict, then use
+ ``--continue`` to complete the unshelve operation. (The bundle
+ will not be moved until you successfully complete the unshelve.)
+
+ (Alternatively, you can use ``--abort`` to abandon an unshelve
+ that causes a conflict. This reverts the unshelved changes, and
+ leaves the bundle in place.)
+
+ If a bare shelved change (created when no files were specified, without
+ the interactive, include and exclude options) was done on a newly created
+ branch, it restores the branch information to the working directory.
+
+ After a successful unshelve, the shelved changes are stored in a
+ backup directory. Only the N most recent backups are kept. N
+ defaults to 10 but can be overridden using the ``shelve.maxbackups``
+ configuration option.
+
+ .. container:: verbose
+
+ Timestamp in seconds is used to decide the order of backups. More
+ than ``maxbackups`` backups are kept if an identical timestamp
+ prevents deciding their exact order, for safety.
+ """
+ with repo.wlock():
+ return shelvemod.dounshelve(ui, repo, *shelved, **opts)
+
+statemod.addunfinished(
+ 'unshelve', fname='shelvedstate', continueflag=True,
+ abortfunc=shelvemod.hgabortunshelve,
+ continuefunc=shelvemod.hgcontinueunshelve,
+ cmdmsg=_('unshelve already in progress'),
+)
+
@command('update|up|checkout|co',
[('C', 'clean', None, _('discard uncommitted changes (no backup)')),
('c', 'check', None, _('require clean working directory')),
@@ -6123,7 +6317,6 @@
with repo.wlock():
cmdutil.clearunfinished(repo)
-
if date:
rev = cmdutil.finddate(ui, repo, date)
@@ -6147,8 +6340,10 @@
ui.warn("(%s)\n" % obsfatemsg)
return ret
-@command('verify', [], helpcategory=command.CATEGORY_MAINTENANCE)
-def verify(ui, repo):
+@command('verify',
+ [('', 'full', False, 'perform more checks (EXPERIMENTAL)')],
+ helpcategory=command.CATEGORY_MAINTENANCE)
+def verify(ui, repo, **opts):
"""verify the integrity of the repository
Verify the integrity of the current repository.
@@ -6164,7 +6359,12 @@
Returns 0 on success, 1 if errors are encountered.
"""
- return hg.verify(repo)
+ opts = pycompat.byteskwargs(opts)
+
+ level = None
+ if opts['full']:
+ level = verifymod.VERIFY_FULL
+ return hg.verify(repo, level)
@command(
'version', [] + formatteropts, helpcategory=command.CATEGORY_HELP,
@@ -6233,16 +6433,6 @@
def loadcmdtable(ui, name, cmdtable):
"""Load command functions from specified cmdtable
"""
- cmdtable = cmdtable.copy()
- for cmd in list(cmdtable):
- if not cmd.startswith('^'):
- continue
- ui.deprecwarn("old-style command registration '%s' in extension '%s'"
- % (cmd, name), '4.8')
- entry = cmdtable.pop(cmd)
- entry[0].helpbasic = True
- cmdtable[cmd[1:]] = entry
-
overrides = [cmd for cmd in cmdtable if cmd in table]
if overrides:
ui.warn(_("extension '%s' overrides commands: %s\n")
--- a/mercurial/configitems.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/configitems.py Mon Jul 22 14:00:33 2019 -0400
@@ -202,6 +202,9 @@
default=dynamicdefault,
)
_registerdiffopts(section='commands', configprefix='commit.interactive.')
+coreconfigitem('commands', 'commit.post-status',
+ default=False,
+)
coreconfigitem('commands', 'grep.all-files',
default=False,
)
@@ -288,6 +291,9 @@
coreconfigitem('convert', 'hg.ignoreerrors',
default=False,
)
+coreconfigitem('convert', 'hg.preserve-hash',
+ default=False,
+)
coreconfigitem('convert', 'hg.revs',
default=None,
)
@@ -526,12 +532,22 @@
coreconfigitem('experimental', 'evolution.bundle-obsmarker',
default=False,
)
+coreconfigitem('experimental', 'log.topo',
+ default=False,
+)
coreconfigitem('experimental', 'evolution.report-instabilities',
default=True,
)
coreconfigitem('experimental', 'evolution.track-operation',
default=True,
)
+# repo-level config to exclude a revset visibility
+#
+# The target use case is to use `share` to expose different subset of the same
+# repository, especially server side. See also `server.view`.
+coreconfigitem('experimental', 'extra-filter-revs',
+ default=None,
+)
coreconfigitem('experimental', 'maxdeltachainspan',
default=-1,
)
@@ -663,6 +679,9 @@
default=None,
generic=True,
)
+coreconfigitem('format', 'bookmarks-in-store',
+ default=False,
+)
coreconfigitem('format', 'chunkcachesize',
default=None,
)
@@ -931,6 +950,9 @@
coreconfigitem('profiling', 'showmin',
default=dynamicdefault,
)
+coreconfigitem('profiling', 'showtime',
+ default=True,
+)
coreconfigitem('profiling', 'sort',
default='inlinetime',
)
@@ -1072,6 +1094,9 @@
coreconfigitem('share', 'poolnaming',
default='identity',
)
+coreconfigitem('shelve','maxbackups',
+ default=10,
+)
coreconfigitem('smtp', 'host',
default=None,
)
--- a/mercurial/context.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/context.py Mon Jul 22 14:00:33 2019 -0400
@@ -272,6 +272,30 @@
except error.LookupError:
return ''
+ @propertycache
+ def _copies(self):
+ p1copies = {}
+ p2copies = {}
+ p1 = self.p1()
+ p2 = self.p2()
+ narrowmatch = self._repo.narrowmatch()
+ for dst in self.files():
+ if not narrowmatch(dst) or dst not in self:
+ continue
+ copied = self[dst].renamed()
+ if not copied:
+ continue
+ src, srcnode = copied
+ if src in p1 and p1[src].filenode() == srcnode:
+ p1copies[dst] = src
+ elif src in p2 and p2[src].filenode() == srcnode:
+ p2copies[dst] = src
+ return p1copies, p2copies
+ def p1copies(self):
+ return self._copies[0]
+ def p2copies(self):
+ return self._copies[1]
+
def sub(self, path, allowcreate=True):
'''return a subrepo for the stored revision of path, never wdir()'''
return subrepo.subrepo(self, path, allowcreate=allowcreate)
@@ -439,6 +463,36 @@
return self._changeset.date
def files(self):
return self._changeset.files
+ def filesmodified(self):
+ modified = set(self.files())
+ modified.difference_update(self.filesadded())
+ modified.difference_update(self.filesremoved())
+ return sorted(modified)
+ def filesadded(self):
+ source = self._repo.ui.config('experimental', 'copies.read-from')
+ if (source == 'changeset-only' or
+ (source == 'compatibility' and
+ self._changeset.filesadded is not None)):
+ return self._changeset.filesadded or []
+
+ added = []
+ for f in self.files():
+ if not any(f in p for p in self.parents()):
+ added.append(f)
+ return added
+ def filesremoved(self):
+ source = self._repo.ui.config('experimental', 'copies.read-from')
+ if (source == 'changeset-only' or
+ (source == 'compatibility' and
+ self._changeset.filesremoved is not None)):
+ return self._changeset.filesremoved or []
+
+ removed = []
+ for f in self.files():
+ if f not in self:
+ removed.append(f)
+ return removed
+
@propertycache
def _copies(self):
source = self._repo.ui.config('experimental', 'copies.read-from')
@@ -456,27 +510,7 @@
# Otherwise (config said to read only from filelog, or we are in
# compatiblity mode and there is not data in the changeset), we get
# the copy metadata from the filelogs.
- p1copies = {}
- p2copies = {}
- p1 = self.p1()
- p2 = self.p2()
- narrowmatch = self._repo.narrowmatch()
- for dst in self.files():
- if not narrowmatch(dst) or dst not in self:
- continue
- copied = self[dst].renamed()
- if not copied:
- continue
- src, srcnode = copied
- if src in p1 and p1[src].filenode() == srcnode:
- p1copies[dst] = src
- elif src in p2 and p2[src].filenode() == srcnode:
- p2copies[dst] = src
- return p1copies, p2copies
- def p1copies(self):
- return self._copies[0]
- def p2copies(self):
- return self._copies[1]
+ return super(changectx, self)._copies
def description(self):
return self._changeset.description
def branch(self):
@@ -1098,7 +1132,7 @@
"""A committablectx object provides common functionality for a context that
wants the ability to commit, e.g. workingctx or memctx."""
def __init__(self, repo, text="", user=None, date=None, extra=None,
- changes=None):
+ changes=None, branch=None):
super(committablectx, self).__init__(repo)
self._rev = None
self._node = None
@@ -1113,13 +1147,9 @@
self._extra = {}
if extra:
self._extra = extra.copy()
- if 'branch' not in self._extra:
- try:
- branch = encoding.fromlocal(self._repo.dirstate.branch())
- except UnicodeDecodeError:
- raise error.Abort(_('branch name not in UTF-8!'))
- self._extra['branch'] = branch
- if self._extra['branch'] == '':
+ if branch is not None:
+ self._extra['branch'] = encoding.fromlocal(branch)
+ if not self._extra.get('branch'):
self._extra['branch'] = 'default'
def __bytes__(self):
@@ -1132,42 +1162,6 @@
__bool__ = __nonzero__
- def _buildflagfunc(self):
- # Create a fallback function for getting file flags when the
- # filesystem doesn't support them
-
- copiesget = self._repo.dirstate.copies().get
- parents = self.parents()
- if len(parents) < 2:
- # when we have one parent, it's easy: copy from parent
- man = parents[0].manifest()
- def func(f):
- f = copiesget(f, f)
- return man.flags(f)
- else:
- # merges are tricky: we try to reconstruct the unstored
- # result from the merge (issue1802)
- p1, p2 = parents
- pa = p1.ancestor(p2)
- m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
-
- def func(f):
- f = copiesget(f, f) # may be wrong for merges with copies
- fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
- if fl1 == fl2:
- return fl1
- if fl1 == fla:
- return fl2
- if fl2 == fla:
- return fl1
- return '' # punt for conflicts
-
- return func
-
- @propertycache
- def _flagfunc(self):
- return self._repo.dirstate.flagfunc(self._buildflagfunc)
-
@propertycache
def _status(self):
return self._repo.status()
@@ -1206,26 +1200,10 @@
return self._status.removed
def deleted(self):
return self._status.deleted
- @propertycache
- def _copies(self):
- p1copies = {}
- p2copies = {}
- parents = self._repo.dirstate.parents()
- p1manifest = self._repo[parents[0]].manifest()
- p2manifest = self._repo[parents[1]].manifest()
- narrowmatch = self._repo.narrowmatch()
- for dst, src in self._repo.dirstate.copies().items():
- if not narrowmatch(dst):
- continue
- if src in p1manifest:
- p1copies[dst] = src
- elif src in p2manifest:
- p2copies[dst] = src
- return p1copies, p2copies
- def p1copies(self):
- return self._copies[0]
- def p2copies(self):
- return self._copies[1]
+ filesmodified = modified
+ filesadded = added
+ filesremoved = removed
+
def branch(self):
return encoding.tolocal(self._extra['branch'])
def closesbranch(self):
@@ -1257,33 +1235,10 @@
def children(self):
return []
- def flags(self, path):
- if r'_manifest' in self.__dict__:
- try:
- return self._manifest.flags(path)
- except KeyError:
- return ''
-
- try:
- return self._flagfunc(path)
- except OSError:
- return ''
-
def ancestor(self, c2):
"""return the "best" ancestor context of self and c2"""
return self._parents[0].ancestor(c2) # punt on two parents for now
- def walk(self, match):
- '''Generates matching file names.'''
- return sorted(self._repo.dirstate.walk(self._repo.narrowmatch(match),
- subrepos=sorted(self.substate),
- unknown=True, ignored=False))
-
- def matches(self, match):
- match = self._repo.narrowmatch(match)
- ds = self._repo.dirstate
- return sorted(f for f in ds.matches(match) if ds[f] != 'r')
-
def ancestors(self):
for p in self._parents:
yield p
@@ -1301,18 +1256,6 @@
"""
- with self._repo.dirstate.parentchange():
- for f in self.modified() + self.added():
- self._repo.dirstate.normal(f)
- for f in self.removed():
- self._repo.dirstate.drop(f)
- self._repo.dirstate.setparents(node)
-
- # write changes out explicitly, because nesting wlock at
- # runtime may prevent 'wlock.release()' in 'repo.commit()'
- # from immediately doing so for subsequent changing files
- self._repo.dirstate.write(self._repo.currenttransaction())
-
def dirty(self, missing=False, merge=True, branch=True):
return False
@@ -1327,7 +1270,14 @@
"""
def __init__(self, repo, text="", user=None, date=None, extra=None,
changes=None):
- super(workingctx, self).__init__(repo, text, user, date, extra, changes)
+ branch = None
+ if not extra or 'branch' not in extra:
+ try:
+ branch = repo.dirstate.branch()
+ except UnicodeDecodeError:
+ raise error.Abort(_('branch name not in UTF-8!'))
+ super(workingctx, self).__init__(repo, text, user, date, extra, changes,
+ branch=branch)
def __iter__(self):
d = self._repo.dirstate
@@ -1355,6 +1305,54 @@
self._manifest
return super(workingctx, self)._fileinfo(path)
+ def _buildflagfunc(self):
+ # Create a fallback function for getting file flags when the
+ # filesystem doesn't support them
+
+ copiesget = self._repo.dirstate.copies().get
+ parents = self.parents()
+ if len(parents) < 2:
+ # when we have one parent, it's easy: copy from parent
+ man = parents[0].manifest()
+ def func(f):
+ f = copiesget(f, f)
+ return man.flags(f)
+ else:
+ # merges are tricky: we try to reconstruct the unstored
+ # result from the merge (issue1802)
+ p1, p2 = parents
+ pa = p1.ancestor(p2)
+ m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
+
+ def func(f):
+ f = copiesget(f, f) # may be wrong for merges with copies
+ fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
+ if fl1 == fl2:
+ return fl1
+ if fl1 == fla:
+ return fl2
+ if fl2 == fla:
+ return fl1
+ return '' # punt for conflicts
+
+ return func
+
+ @propertycache
+ def _flagfunc(self):
+ return self._repo.dirstate.flagfunc(self._buildflagfunc)
+
+ def flags(self, path):
+ if r'_manifest' in self.__dict__:
+ try:
+ return self._manifest.flags(path)
+ except KeyError:
+ return ''
+
+ try:
+ return self._flagfunc(path)
+ except OSError:
+ return ''
+
def filectx(self, path, filelog=None):
"""get a file context from the working directory"""
return workingfilectx(self._repo, path, workingctx=self,
@@ -1579,6 +1577,23 @@
return s
@propertycache
+ def _copies(self):
+ p1copies = {}
+ p2copies = {}
+ parents = self._repo.dirstate.parents()
+ p1manifest = self._repo[parents[0]].manifest()
+ p2manifest = self._repo[parents[1]].manifest()
+ narrowmatch = self._repo.narrowmatch()
+ for dst, src in self._repo.dirstate.copies().items():
+ if not narrowmatch(dst):
+ continue
+ if src in p1manifest:
+ p1copies[dst] = src
+ elif src in p2manifest:
+ p2copies[dst] = src
+ return p1copies, p2copies
+
+ @propertycache
def _manifest(self):
"""generate a manifest corresponding to the values in self._status
@@ -1651,8 +1666,29 @@
match.bad = bad
return match
+ def walk(self, match):
+ '''Generates matching file names.'''
+ return sorted(self._repo.dirstate.walk(self._repo.narrowmatch(match),
+ subrepos=sorted(self.substate),
+ unknown=True, ignored=False))
+
+ def matches(self, match):
+ match = self._repo.narrowmatch(match)
+ ds = self._repo.dirstate
+ return sorted(f for f in ds.matches(match) if ds[f] != 'r')
+
def markcommitted(self, node):
- super(workingctx, self).markcommitted(node)
+ with self._repo.dirstate.parentchange():
+ for f in self.modified() + self.added():
+ self._repo.dirstate.normal(f)
+ for f in self.removed():
+ self._repo.dirstate.drop(f)
+ self._repo.dirstate.setparents(node)
+
+ # write changes out explicitly, because nesting wlock at
+ # runtime may prevent 'wlock.release()' in 'repo.commit()'
+ # from immediately doing so for subsequent changing files
+ self._repo.dirstate.write(self._repo.currenttransaction())
sparse.aftercommit(self._repo, node)
@@ -1726,6 +1762,8 @@
def size(self):
return self._repo.wvfs.lstat(self._path).st_size
+ def lstat(self):
+ return self._repo.wvfs.lstat(self._path)
def date(self):
t, tz = self._changectx.date()
try:
@@ -1761,14 +1799,13 @@
def write(self, data, flags, backgroundclose=False, **kwargs):
"""wraps repo.wwrite"""
- self._repo.wwrite(self._path, data, flags,
- backgroundclose=backgroundclose,
- **kwargs)
+ return self._repo.wwrite(self._path, data, flags,
+ backgroundclose=backgroundclose,
+ **kwargs)
def markcopied(self, src):
"""marks this file a copy of `src`"""
- if self._repo.dirstate[self._path] in "nma":
- self._repo.dirstate.copy(src, self._path)
+ self._repo.dirstate.copy(src, self._path)
def clearunknown(self):
"""Removes conflicting items in the working directory so that
@@ -1913,7 +1950,7 @@
if self.isdirty(path):
return self._cache[path]['copied']
else:
- raise error.ProgrammingError('copydata() called on clean context')
+ return None
def flags(self, path):
if self.isdirty(path):
@@ -2055,7 +2092,7 @@
else:
parents = (self._repo[parents[0]], self._repo[parents[1]])
- files = self._cache.keys()
+ files = self.files()
def getfile(repo, memctx, path):
if self._cache[path]['exists']:
return memfilectx(repo, memctx, path,
@@ -2118,7 +2155,9 @@
# the file is marked as existing.
if exists and data is None:
oldentry = self._cache.get(path) or {}
- data = oldentry.get('data') or self._wrappedctx[path].data()
+ data = oldentry.get('data')
+ if data is None:
+ data = self._wrappedctx[path].data()
self._cache[path] = {
'exists': exists,
@@ -2305,7 +2344,8 @@
def __init__(self, repo, parents, text, files, filectxfn, user=None,
date=None, extra=None, branch=None, editor=False):
- super(memctx, self).__init__(repo, text, user, date, extra)
+ super(memctx, self).__init__(repo, text, user, date, extra,
+ branch=branch)
self._rev = None
self._node = None
parents = [(p or nullid) for p in parents]
@@ -2313,8 +2353,6 @@
self._parents = [self._repo[p] for p in (p1, p2)]
files = sorted(set(files))
self._files = files
- if branch is not None:
- self._extra['branch'] = encoding.fromlocal(branch)
self.substate = {}
if isinstance(filectxfn, patch.filestore):
--- a/mercurial/copies.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/copies.py Mon Jul 22 14:00:33 2019 -0400
@@ -107,40 +107,60 @@
# This only occurs when a is a descendent of b or visa-versa.
return min(limit, a, b)
-def _chain(src, dst, a, b):
- """chain two sets of copies a->b"""
- t = a.copy()
- for k, v in b.iteritems():
- if v in t:
- # found a chain
- if t[v] != k:
- # file wasn't renamed back to itself
- t[k] = t[v]
- if v not in dst:
- # chain was a rename, not a copy
- del t[v]
- if v in src:
- # file is a copy of an existing file
- t[k] = v
+def _filter(src, dst, t):
+ """filters out invalid copies after chaining"""
+
+ # When _chain()'ing copies in 'a' (from 'src' via some other commit 'mid')
+ # with copies in 'b' (from 'mid' to 'dst'), we can get the different cases
+ # in the following table (not including trivial cases). For example, case 2
+ # is where a file existed in 'src' and remained under that name in 'mid' and
+ # then was renamed between 'mid' and 'dst'.
+ #
+ # case src mid dst result
+ # 1 x y - -
+ # 2 x y y x->y
+ # 3 x y x -
+ # 4 x y z x->z
+ # 5 - x y -
+ # 6 x x y x->y
+ #
+ # _chain() takes care of chaining the copies in 'a' and 'b', but it
+ # cannot tell the difference between cases 1 and 2, between 3 and 4, or
+ # between 5 and 6, so it includes all cases in its result.
+ # Cases 1, 3, and 5 are then removed by _filter().
for k, v in list(t.items()):
+ # remove copies from files that didn't exist
+ if v not in src:
+ del t[k]
# remove criss-crossed copies
- if k in src and v in dst:
+ elif k in src and v in dst:
del t[k]
# remove copies to files that were then removed
elif k not in dst:
del t[k]
+def _chain(a, b):
+ """chain two sets of copies 'a' and 'b'"""
+ t = a.copy()
+ for k, v in b.iteritems():
+ if v in t:
+ t[k] = t[v]
+ else:
+ t[k] = v
return t
-def _tracefile(fctx, am, limit=node.nullrev):
+def _tracefile(fctx, am, basemf, limit):
"""return file context that is the ancestor of fctx present in ancestor
manifest am, stopping after the first ancestor lower than limit"""
for f in fctx.ancestors():
- if am.get(f.path(), None) == f.filenode():
- return f
- if limit >= 0 and not f.isintroducedafter(limit):
+ path = f.path()
+ if am.get(path, None) == f.filenode():
+ return path
+ if basemf and basemf.get(path, None) == f.filenode():
+ return path
+ if not f.isintroducedafter(limit):
return None
def _dirstatecopies(repo, match=None):
@@ -165,7 +185,7 @@
return (repo.ui.config('experimental', 'copies.read-from') in
('changeset-only', 'compatibility'))
-def _committedforwardcopies(a, b, match):
+def _committedforwardcopies(a, b, base, match):
"""Like _forwardcopies(), but b.rev() cannot be None (working copy)"""
# files might have to be traced back to the fctx parent of the last
# one-side-only changeset, but not further back than that
@@ -183,6 +203,7 @@
if debug:
dbg('debug.copies: search limit: %d\n' % limit)
am = a.manifest()
+ basemf = None if base is None else base.manifest()
# find where new files came from
# we currently don't try to find where old files went, too expensive
@@ -204,9 +225,9 @@
ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True)
if debug:
- dbg('debug.copies: missing file to search: %d\n' % len(missing))
+ dbg('debug.copies: missing files to search: %d\n' % len(missing))
- for f in missing:
+ for f in sorted(missing):
if debug:
dbg('debug.copies: tracing file: %s\n' % f)
fctx = b[f]
@@ -214,11 +235,11 @@
if debug:
start = util.timer()
- ofctx = _tracefile(fctx, am, limit)
- if ofctx:
+ opath = _tracefile(fctx, am, basemf, limit)
+ if opath:
if debug:
- dbg('debug.copies: rename of: %s\n' % ofctx._path)
- cm[f] = ofctx.path()
+ dbg('debug.copies: rename of: %s\n' % opath)
+ cm[f] = opath
if debug:
dbg('debug.copies: time: %f seconds\n'
% (util.timer() - start))
@@ -245,40 +266,30 @@
# 'work' contains 3-tuples of a (revision number, parent number, copies).
# The parent number is only used for knowing which parent the copies dict
# came from.
+ # NOTE: To reduce costly copying of the 'copies' dicts, we reuse the same
+ # instance for *one* of the child nodes (the last one). Once an instance
+ # has been put on the queue, it is thus no longer safe to modify it.
+ # Conversely, it *is* safe to modify an instance popped off the queue.
work = [(r, 1, {}) for r in roots]
heapq.heapify(work)
+ alwaysmatch = match.always()
while work:
- r, i1, copies1 = heapq.heappop(work)
+ r, i1, copies = heapq.heappop(work)
if work and work[0][0] == r:
# We are tracing copies from both parents
r, i2, copies2 = heapq.heappop(work)
- copies = {}
- ctx = repo[r]
- p1man, p2man = ctx.p1().manifest(), ctx.p2().manifest()
- allcopies = set(copies1) | set(copies2)
- # TODO: perhaps this filtering should be done as long as ctx
- # is merge, whether or not we're tracing from both parent.
- for dst in allcopies:
- if not match(dst):
- continue
- if dst not in copies2:
- # Copied on p1 side: mark as copy from p1 side if it didn't
- # already exist on p2 side
- if dst not in p2man:
- copies[dst] = copies1[dst]
- elif dst not in copies1:
- # Copied on p2 side: mark as copy from p2 side if it didn't
- # already exist on p1 side
- if dst not in p1man:
- copies[dst] = copies2[dst]
- else:
- # Copied on both sides: mark as copy from p1 side
- copies[dst] = copies1[dst]
- else:
- copies = copies1
+ for dst, src in copies2.items():
+ # Unlike when copies are stored in the filelog, we consider
+ # it a copy even if the destination already existed on the
+ # other branch. It's simply too expensive to check if the
+ # file existed in the manifest.
+ if dst not in copies:
+ # If it was copied on the p1 side, leave it as copied from
+ # that side, even if it was also copied on the p2 side.
+ copies[dst] = copies2[dst]
if r == b.rev():
return copies
- for c in children[r]:
+ for i, c in enumerate(children[r]):
childctx = repo[c]
if r == childctx.p1().rev():
parent = 1
@@ -287,27 +298,36 @@
assert r == childctx.p2().rev()
parent = 2
childcopies = childctx.p2copies()
- if not match.always():
+ if not alwaysmatch:
childcopies = {dst: src for dst, src in childcopies.items()
if match(dst)}
- childcopies = _chain(a, childctx, copies, childcopies)
- heapq.heappush(work, (c, parent, childcopies))
+ # Copy the dict only if later iterations will also need it
+ if i != len(children[r]) - 1:
+ newcopies = copies.copy()
+ else:
+ newcopies = copies
+ if childcopies:
+ newcopies = _chain(newcopies, childcopies)
+ for f in childctx.filesremoved():
+ if f in newcopies:
+ del newcopies[f]
+ heapq.heappush(work, (c, parent, newcopies))
assert False
-def _forwardcopies(a, b, match=None):
+def _forwardcopies(a, b, base=None, match=None):
"""find {dst@b: src@a} copy mapping where a is an ancestor of b"""
+ if base is None:
+ base = a
match = a.repo().narrowmatch(match)
# check for working copy
if b.rev() is None:
- if a == b.p1():
- # short-circuit to avoid issues with merge states
- return _dirstatecopies(b._repo, match)
-
- cm = _committedforwardcopies(a, b.p1(), match)
+ cm = _committedforwardcopies(a, b.p1(), base, match)
# combine copies from dirstate if necessary
- return _chain(a, b, cm, _dirstatecopies(b._repo, match))
- return _committedforwardcopies(a, b, match)
+ copies = _chain(cm, _dirstatecopies(b._repo, match))
+ else:
+ copies = _committedforwardcopies(a, b, base, match)
+ return copies
def _backwardrenames(a, b, match):
if a._repo.ui.config('experimental', 'copytrace') == 'off':
@@ -343,90 +363,24 @@
if a == x:
if debug:
repo.ui.debug('debug.copies: search mode: forward\n')
- return _forwardcopies(x, y, match=match)
- if a == y:
+ if y.rev() is None and x == y.p1():
+ # short-circuit to avoid issues with merge states
+ return _dirstatecopies(repo, match)
+ copies = _forwardcopies(x, y, match=match)
+ elif a == y:
if debug:
repo.ui.debug('debug.copies: search mode: backward\n')
- return _backwardrenames(x, y, match=match)
- if debug:
- repo.ui.debug('debug.copies: search mode: combined\n')
- return _chain(x, y, _backwardrenames(x, a, match=match),
- _forwardcopies(a, y, match=match))
-
-def _computenonoverlap(repo, c1, c2, addedinm1, addedinm2, baselabel=''):
- """Computes, based on addedinm1 and addedinm2, the files exclusive to c1
- and c2. This is its own function so extensions can easily wrap this call
- to see what files mergecopies is about to process.
-
- Even though c1 and c2 are not used in this function, they are useful in
- other extensions for being able to read the file nodes of the changed files.
-
- "baselabel" can be passed to help distinguish the multiple computations
- done in the graft case.
- """
- u1 = sorted(addedinm1 - addedinm2)
- u2 = sorted(addedinm2 - addedinm1)
-
- header = " unmatched files in %s"
- if baselabel:
- header += ' (from %s)' % baselabel
- if u1:
- repo.ui.debug("%s:\n %s\n" % (header % 'local', "\n ".join(u1)))
- if u2:
- repo.ui.debug("%s:\n %s\n" % (header % 'other', "\n ".join(u2)))
-
- return u1, u2
-
-def _makegetfctx(ctx):
- """return a 'getfctx' function suitable for _checkcopies usage
-
- We have to re-setup the function building 'filectx' for each
- '_checkcopies' to ensure the linkrev adjustment is properly setup for
- each. Linkrev adjustment is important to avoid bug in rename
- detection. Moreover, having a proper '_ancestrycontext' setup ensures
- the performance impact of this adjustment is kept limited. Without it,
- each file could do a full dag traversal making the time complexity of
- the operation explode (see issue4537).
-
- This function exists here mostly to limit the impact on stable. Feel
- free to refactor on default.
- """
- rev = ctx.rev()
- repo = ctx._repo
- ac = getattr(ctx, '_ancestrycontext', None)
- if ac is None:
- revs = [rev]
- if rev is None:
- revs = [p.rev() for p in ctx.parents()]
- ac = repo.changelog.ancestors(revs, inclusive=True)
- ctx._ancestrycontext = ac
- def makectx(f, n):
- if n in node.wdirfilenodeids: # in a working context?
- if ctx.rev() is None:
- return ctx.filectx(f)
- return repo[None][f]
- fctx = repo.filectx(f, fileid=n)
- # setup only needed for filectx not create from a changectx
- fctx._ancestrycontext = ac
- fctx._descendantrev = rev
- return fctx
- return util.lrucachefunc(makectx)
-
-def _combinecopies(copyfrom, copyto, finalcopy, diverge, incompletediverge):
- """combine partial copy paths"""
- remainder = {}
- for f in copyfrom:
- if f in copyto:
- finalcopy[copyto[f]] = copyfrom[f]
- del copyto[f]
- for f in incompletediverge:
- assert f not in diverge
- ic = incompletediverge[f]
- if ic[0] in copyto:
- diverge[f] = [copyto[ic[0]], ic[1]]
- else:
- remainder[f] = ic
- return remainder
+ copies = _backwardrenames(x, y, match=match)
+ else:
+ if debug:
+ repo.ui.debug('debug.copies: search mode: combined\n')
+ base = None
+ if a.rev() != node.nullrev:
+ base = x
+ copies = _chain(_backwardrenames(x, a, match=match),
+ _forwardcopies(a, y, base, match=match))
+ _filter(x, y, copies)
+ return copies
def mergecopies(repo, c1, c2, base):
"""
@@ -485,7 +439,14 @@
return _dirstatecopies(repo, narrowmatch), {}, {}, {}, {}
copytracing = repo.ui.config('experimental', 'copytrace')
- boolctrace = stringutil.parsebool(copytracing)
+ if stringutil.parsebool(copytracing) is False:
+ # stringutil.parsebool() returns None when it is unable to parse the
+ # value, so we treat copytracing as enabled in such cases
+ return {}, {}, {}, {}, {}
+
+ if usechangesetcentricalgo(repo):
+ # The heuristics don't make sense when we need changeset-centric algos
+ return _fullcopytracing(repo, c1, c2, base)
# Copy trace disabling is explicitly below the node == p1 logic above
# because the logic above is required for a simple copy to be kept across a
@@ -497,10 +458,6 @@
if _isfullcopytraceable(repo, c1, base):
return _fullcopytracing(repo, c1, c2, base)
return _heuristicscopytracing(repo, c1, c2, base)
- elif boolctrace is False:
- # stringutil.parsebool() returns None when it is unable to parse the
- # value, so we should rely on making sure copytracing is on such cases
- return {}, {}, {}, {}, {}
else:
return _fullcopytracing(repo, c1, c2, base)
@@ -522,6 +479,23 @@
return commits < sourcecommitlimit
return False
+def _checksinglesidecopies(src, dsts1, m1, m2, mb, c2, base,
+ copy, renamedelete):
+ if src not in m2:
+ # deleted on side 2
+ if src not in m1:
+ # renamed on side 1, deleted on side 2
+ renamedelete[src] = dsts1
+ elif m2[src] != mb[src]:
+ if not _related(c2[src], base[src]):
+ return
+ # modified on side 2
+ for dst in dsts1:
+ if dst not in m2:
+ # dst not added on side 2 (handle as regular
+ # "both created" case in manifestmerge otherwise)
+ copy[dst] = src
+
def _fullcopytracing(repo, c1, c2, base):
""" The full copytracing algorithm which finds all the new files that were
added from merge base up to the top commit and for each file it checks if
@@ -530,159 +504,84 @@
This is pretty slow when a lot of changesets are involved but will track all
the copies.
"""
- # In certain scenarios (e.g. graft, update or rebase), base can be
- # overridden We still need to know a real common ancestor in this case We
- # can't just compute _c1.ancestor(_c2) and compare it to ca, because there
- # can be multiple common ancestors, e.g. in case of bidmerge. Because our
- # caller may not know if the revision passed in lieu of the CA is a genuine
- # common ancestor or not without explicitly checking it, it's better to
- # determine that here.
- #
- # base.isancestorof(wc) is False, work around that
- _c1 = c1.p1() if c1.rev() is None else c1
- _c2 = c2.p1() if c2.rev() is None else c2
- # an endpoint is "dirty" if it isn't a descendant of the merge base
- # if we have a dirty endpoint, we need to trigger graft logic, and also
- # keep track of which endpoint is dirty
- dirtyc1 = not base.isancestorof(_c1)
- dirtyc2 = not base.isancestorof(_c2)
- graft = dirtyc1 or dirtyc2
- tca = base
- if graft:
- tca = _c1.ancestor(_c2)
-
- limit = _findlimit(repo, c1, c2)
- repo.ui.debug(" searching for copies back to rev %d\n" % limit)
-
m1 = c1.manifest()
m2 = c2.manifest()
mb = base.manifest()
- # gather data from _checkcopies:
- # - diverge = record all diverges in this dict
- # - copy = record all non-divergent copies in this dict
- # - fullcopy = record all copies in this dict
- # - incomplete = record non-divergent partial copies here
- # - incompletediverge = record divergent partial copies here
- diverge = {} # divergence data is shared
- incompletediverge = {}
- data1 = {'copy': {},
- 'fullcopy': {},
- 'incomplete': {},
- 'diverge': diverge,
- 'incompletediverge': incompletediverge,
- }
- data2 = {'copy': {},
- 'fullcopy': {},
- 'incomplete': {},
- 'diverge': diverge,
- 'incompletediverge': incompletediverge,
- }
+ copies1 = pathcopies(base, c1)
+ copies2 = pathcopies(base, c2)
+
+ inversecopies1 = {}
+ inversecopies2 = {}
+ for dst, src in copies1.items():
+ inversecopies1.setdefault(src, []).append(dst)
+ for dst, src in copies2.items():
+ inversecopies2.setdefault(src, []).append(dst)
+
+ copy = {}
+ diverge = {}
+ renamedelete = {}
+ allsources = set(inversecopies1) | set(inversecopies2)
+ for src in allsources:
+ dsts1 = inversecopies1.get(src)
+ dsts2 = inversecopies2.get(src)
+ if dsts1 and dsts2:
+ # copied/renamed on both sides
+ if src not in m1 and src not in m2:
+ # renamed on both sides
+ dsts1 = set(dsts1)
+ dsts2 = set(dsts2)
+ # If there's some overlap in the rename destinations, we
+ # consider it not divergent. For example, if side 1 copies 'a'
+ # to 'b' and 'c' and deletes 'a', and side 2 copies 'a' to 'c'
+ # and 'd' and deletes 'a'.
+ if dsts1 & dsts2:
+ for dst in (dsts1 & dsts2):
+ copy[dst] = src
+ else:
+ diverge[src] = sorted(dsts1 | dsts2)
+ elif src in m1 and src in m2:
+ # copied on both sides
+ dsts1 = set(dsts1)
+ dsts2 = set(dsts2)
+ for dst in (dsts1 & dsts2):
+ copy[dst] = src
+ # TODO: Handle cases where it was renamed on one side and copied
+ # on the other side
+ elif dsts1:
+ # copied/renamed only on side 1
+ _checksinglesidecopies(src, dsts1, m1, m2, mb, c2, base,
+ copy, renamedelete)
+ elif dsts2:
+ # copied/renamed only on side 2
+ _checksinglesidecopies(src, dsts2, m2, m1, mb, c1, base,
+ copy, renamedelete)
+
+ renamedeleteset = set()
+ divergeset = set()
+ for dsts in diverge.values():
+ divergeset.update(dsts)
+ for dsts in renamedelete.values():
+ renamedeleteset.update(dsts)
# find interesting file sets from manifests
addedinm1 = m1.filesnotin(mb, repo.narrowmatch())
addedinm2 = m2.filesnotin(mb, repo.narrowmatch())
- bothnew = sorted(addedinm1 & addedinm2)
- if tca == base:
- # unmatched file from base
- u1r, u2r = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2)
- u1u, u2u = u1r, u2r
- else:
- # unmatched file from base (DAG rotation in the graft case)
- u1r, u2r = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2,
- baselabel='base')
- # unmatched file from topological common ancestors (no DAG rotation)
- # need to recompute this for directory move handling when grafting
- mta = tca.manifest()
- u1u, u2u = _computenonoverlap(repo, c1, c2,
- m1.filesnotin(mta, repo.narrowmatch()),
- m2.filesnotin(mta, repo.narrowmatch()),
- baselabel='topological common ancestor')
-
- for f in u1u:
- _checkcopies(c1, c2, f, base, tca, dirtyc1, limit, data1)
-
- for f in u2u:
- _checkcopies(c2, c1, f, base, tca, dirtyc2, limit, data2)
-
- copy = dict(data1['copy'])
- copy.update(data2['copy'])
- fullcopy = dict(data1['fullcopy'])
- fullcopy.update(data2['fullcopy'])
-
- if dirtyc1:
- _combinecopies(data2['incomplete'], data1['incomplete'], copy, diverge,
- incompletediverge)
- if dirtyc2:
- _combinecopies(data1['incomplete'], data2['incomplete'], copy, diverge,
- incompletediverge)
-
- renamedelete = {}
- renamedeleteset = set()
- divergeset = set()
- for of, fl in list(diverge.items()):
- if len(fl) == 1 or of in c1 or of in c2:
- del diverge[of] # not actually divergent, or not a rename
- if of not in c1 and of not in c2:
- # renamed on one side, deleted on the other side, but filter
- # out files that have been renamed and then deleted
- renamedelete[of] = [f for f in fl if f in c1 or f in c2]
- renamedeleteset.update(fl) # reverse map for below
- else:
- divergeset.update(fl) # reverse map for below
+ u1 = sorted(addedinm1 - addedinm2)
+ u2 = sorted(addedinm2 - addedinm1)
- if bothnew:
- repo.ui.debug(" unmatched files new in both:\n %s\n"
- % "\n ".join(bothnew))
- bothdiverge = {}
- bothincompletediverge = {}
- remainder = {}
- both1 = {'copy': {},
- 'fullcopy': {},
- 'incomplete': {},
- 'diverge': bothdiverge,
- 'incompletediverge': bothincompletediverge
- }
- both2 = {'copy': {},
- 'fullcopy': {},
- 'incomplete': {},
- 'diverge': bothdiverge,
- 'incompletediverge': bothincompletediverge
- }
- for f in bothnew:
- _checkcopies(c1, c2, f, base, tca, dirtyc1, limit, both1)
- _checkcopies(c2, c1, f, base, tca, dirtyc2, limit, both2)
- if dirtyc1 and dirtyc2:
- remainder = _combinecopies(both2['incomplete'], both1['incomplete'],
- copy, bothdiverge, bothincompletediverge)
- remainder1 = _combinecopies(both1['incomplete'], both2['incomplete'],
- copy, bothdiverge, bothincompletediverge)
- remainder.update(remainder1)
- elif dirtyc1:
- # incomplete copies may only be found on the "dirty" side for bothnew
- assert not both2['incomplete']
- remainder = _combinecopies({}, both1['incomplete'], copy, bothdiverge,
- bothincompletediverge)
- elif dirtyc2:
- assert not both1['incomplete']
- remainder = _combinecopies({}, both2['incomplete'], copy, bothdiverge,
- bothincompletediverge)
- else:
- # incomplete copies and divergences can't happen outside grafts
- assert not both1['incomplete']
- assert not both2['incomplete']
- assert not bothincompletediverge
- for f in remainder:
- assert f not in bothdiverge
- ic = remainder[f]
- if ic[0] in (m1 if dirtyc1 else m2):
- # backed-out rename on one side, but watch out for deleted files
- bothdiverge[f] = ic
- for of, fl in bothdiverge.items():
- if len(fl) == 2 and fl[0] == fl[1]:
- copy[fl[0]] = of # not actually divergent, just matching renames
+ header = " unmatched files in %s"
+ if u1:
+ repo.ui.debug("%s:\n %s\n" % (header % 'local', "\n ".join(u1)))
+ if u2:
+ repo.ui.debug("%s:\n %s\n" % (header % 'other', "\n ".join(u2)))
- if fullcopy and repo.ui.debugflag:
+ fullcopy = copies1.copy()
+ fullcopy.update(copies2)
+ if not fullcopy:
+ return copy, {}, diverge, renamedelete, {}
+
+ if repo.ui.debugflag:
repo.ui.debug(" all copies found (* = to merge, ! = divergent, "
"% = renamed and deleted):\n")
for f in sorted(fullcopy):
@@ -697,16 +596,10 @@
note))
del divergeset
- if not fullcopy:
- return copy, {}, diverge, renamedelete, {}
-
repo.ui.debug(" checking for directory renames\n")
# generate a directory move map
d1, d2 = c1.dirs(), c2.dirs()
- # Hack for adding '', which is not otherwise added, to d1 and d2
- d1.addpath('/')
- d2.addpath('/')
invalid = set()
dirmove = {}
@@ -746,7 +639,7 @@
movewithdir = {}
# check unaccounted nonoverlapping files against directory moves
- for f in u1r + u2r:
+ for f in u1 + u2:
if f not in fullcopy:
for d in dirmove:
if f.startswith(d):
@@ -893,99 +786,6 @@
except StopIteration:
return False
-def _checkcopies(srcctx, dstctx, f, base, tca, remotebase, limit, data):
- """
- check possible copies of f from msrc to mdst
-
- srcctx = starting context for f in msrc
- dstctx = destination context for f in mdst
- f = the filename to check (as in msrc)
- base = the changectx used as a merge base
- tca = topological common ancestor for graft-like scenarios
- remotebase = True if base is outside tca::srcctx, False otherwise
- limit = the rev number to not search beyond
- data = dictionary of dictionary to store copy data. (see mergecopies)
-
- note: limit is only an optimization, and provides no guarantee that
- irrelevant revisions will not be visited
- there is no easy way to make this algorithm stop in a guaranteed way
- once it "goes behind a certain revision".
- """
-
- msrc = srcctx.manifest()
- mdst = dstctx.manifest()
- mb = base.manifest()
- mta = tca.manifest()
- # Might be true if this call is about finding backward renames,
- # This happens in the case of grafts because the DAG is then rotated.
- # If the file exists in both the base and the source, we are not looking
- # for a rename on the source side, but on the part of the DAG that is
- # traversed backwards.
- #
- # In the case there is both backward and forward renames (before and after
- # the base) this is more complicated as we must detect a divergence.
- # We use 'backwards = False' in that case.
- backwards = not remotebase and base != tca and f in mb
- getsrcfctx = _makegetfctx(srcctx)
- getdstfctx = _makegetfctx(dstctx)
-
- if msrc[f] == mb.get(f) and not remotebase:
- # Nothing to merge
- return
-
- of = None
- seen = {f}
- for oc in getsrcfctx(f, msrc[f]).ancestors():
- of = oc.path()
- if of in seen:
- # check limit late - grab last rename before
- if oc.linkrev() < limit:
- break
- continue
- seen.add(of)
-
- # remember for dir rename detection
- if backwards:
- data['fullcopy'][of] = f # grafting backwards through renames
- else:
- data['fullcopy'][f] = of
- if of not in mdst:
- continue # no match, keep looking
- if mdst[of] == mb.get(of):
- return # no merge needed, quit early
- c2 = getdstfctx(of, mdst[of])
- # c2 might be a plain new file on added on destination side that is
- # unrelated to the droids we are looking for.
- cr = _related(oc, c2)
- if cr and (of == f or of == c2.path()): # non-divergent
- if backwards:
- data['copy'][of] = f
- elif of in mb:
- data['copy'][f] = of
- elif remotebase: # special case: a <- b <- a -> b "ping-pong" rename
- data['copy'][of] = f
- del data['fullcopy'][f]
- data['fullcopy'][of] = f
- else: # divergence w.r.t. graft CA on one side of topological CA
- for sf in seen:
- if sf in mb:
- assert sf not in data['diverge']
- data['diverge'][sf] = [f, of]
- break
- return
-
- if of in mta:
- if backwards or remotebase:
- data['incomplete'][of] = f
- else:
- for sf in seen:
- if sf in mb:
- if tca == base:
- data['diverge'].setdefault(sf, []).append(f)
- else:
- data['incompletediverge'][sf] = [of, f]
- return
-
def duplicatecopies(repo, wctx, rev, fromrev, skiprev=None):
"""reproduce copies from fromrev to rev in the dirstate
@@ -1005,8 +805,7 @@
# metadata across the rebase anyway).
exclude = pathcopies(repo[fromrev], repo[skiprev])
for dst, src in pathcopies(repo[fromrev], repo[rev]).iteritems():
- # copies.pathcopies returns backward renames, so dst might not
- # actually be in the dirstate
if dst in exclude:
continue
- wctx[dst].markcopied(src)
+ if dst in wctx:
+ wctx[dst].markcopied(src)
--- a/mercurial/crecord.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/crecord.py Mon Jul 22 14:00:33 2019 -0400
@@ -608,6 +608,7 @@
# the currently selected header, hunk, or hunk-line
self.currentselecteditem = self.headerlist[0]
+ self.lastapplieditem = None
# updated when printing out patch-display -- the 'lines' here are the
# line positions *in the pad*, not on the screen.
@@ -723,7 +724,7 @@
self.currentselecteditem = nextitem
self.recenterdisplayedarea()
- def nextsametype(self):
+ def nextsametype(self, test=False):
currentitem = self.currentselecteditem
sametype = lambda item: isinstance(item, type(currentitem))
nextitem = currentitem.nextitem()
@@ -739,7 +740,8 @@
self.togglefolded(parent)
self.currentselecteditem = nextitem
- self.recenterdisplayedarea()
+ if not test:
+ self.recenterdisplayedarea()
def rightarrowevent(self):
"""
@@ -838,6 +840,8 @@
"""
if item is None:
item = self.currentselecteditem
+ # Only set this when NOT using 'toggleall'
+ self.lastapplieditem = item
item.applied = not item.applied
@@ -931,6 +935,45 @@
self.toggleapply(item)
self.waslasttoggleallapplied = not self.waslasttoggleallapplied
+ def toggleallbetween(self):
+ "toggle applied on or off for all items in range [lastapplied,current]."
+ if (not self.lastapplieditem or
+ self.currentselecteditem == self.lastapplieditem):
+ # Treat this like a normal 'x'/' '
+ self.toggleapply()
+ return
+
+ startitem = self.lastapplieditem
+ enditem = self.currentselecteditem
+ # Verify that enditem is "after" startitem, otherwise swap them.
+ for direction in ['forward', 'reverse']:
+ nextitem = startitem.nextitem()
+ while nextitem and nextitem != enditem:
+ nextitem = nextitem.nextitem()
+ if nextitem:
+ break
+ # Looks like we went the wrong direction :)
+ startitem, enditem = enditem, startitem
+
+ if not nextitem:
+ # We didn't find a path going either forward or backward? Don't know
+ # how this can happen, let's not crash though.
+ return
+
+ nextitem = startitem
+ # Switch all items to be the opposite state of the currently selected
+ # item. Specifically:
+ # [ ] startitem
+ # [x] middleitem
+ # [ ] enditem <-- currently selected
+ # This will turn all three on, since the currently selected item is off.
+ # This does *not* invert each item (i.e. middleitem stays marked/on)
+ desiredstate = not self.currentselecteditem.applied
+ while nextitem != enditem.nextitem():
+ if nextitem.applied != desiredstate:
+ self.toggleapply(item=nextitem)
+ nextitem = nextitem.nextitem()
+
def togglefolded(self, item=None, foldparent=False):
"toggle folded flag of specified item (defaults to currently selected)"
if item is None:
@@ -1460,9 +1503,10 @@
can use crecord multiple times to split large changes into smaller changesets.
the following are valid keystrokes:
- [space] : (un-)select item ([~]/[x] = partly/fully applied)
+ x [space] : (un-)select item ([~]/[x] = partly/fully applied)
[enter] : (un-)select item and go to next item of same type
A : (un-)select all items
+ X : (un-)select all items between current and most-recent
up/down-arrow [k/j] : go to previous/next unfolded item
pgup/pgdn [K/J] : go to previous/next item of same type
right/left-arrow [l/h] : go to child item / parent item
@@ -1724,7 +1768,7 @@
keypressed = pycompat.bytestr(keypressed)
if keypressed in ["k", "KEY_UP"]:
self.uparrowevent()
- if keypressed in ["K", "KEY_PPAGE"]:
+ elif keypressed in ["K", "KEY_PPAGE"]:
self.uparrowshiftevent()
elif keypressed in ["j", "KEY_DOWN"]:
self.downarrowevent()
@@ -1742,8 +1786,6 @@
self.toggleamend(self.opts, test)
elif keypressed in ["c"]:
return True
- elif test and keypressed in ['X']:
- return True
elif keypressed in ["r"]:
if self.reviewcommit():
self.opts['review'] = True
@@ -1751,11 +1793,13 @@
elif test and keypressed in ['R']:
self.opts['review'] = True
return True
- elif keypressed in [' '] or (test and keypressed in ["TOGGLE"]):
+ elif keypressed in [' ', 'x']:
self.toggleapply()
elif keypressed in ['\n', 'KEY_ENTER']:
self.toggleapply()
- self.nextsametype()
+ self.nextsametype(test=test)
+ elif keypressed in ['X']:
+ self.toggleallbetween()
elif keypressed in ['A']:
self.toggleall()
elif keypressed in ['e']:
--- a/mercurial/dagop.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/dagop.py Mon Jul 22 14:00:33 2019 -0400
@@ -259,13 +259,10 @@
yield rev
break
-def _reachablerootspure(repo, minroot, roots, heads, includepath):
- """return (heads(::<roots> and ::<heads>))
-
- If includepath is True, return (<roots>::<heads>)."""
+def _reachablerootspure(pfunc, minroot, roots, heads, includepath):
+ """See revlog.reachableroots"""
if not roots:
return []
- parentrevs = repo.changelog.parentrevs
roots = set(roots)
visit = list(heads)
reachable = set()
@@ -282,7 +279,7 @@
reached(rev)
if not includepath:
continue
- parents = parentrevs(rev)
+ parents = pfunc(rev)
seen[rev] = parents
for parent in parents:
if parent >= minroot and parent not in seen:
@@ -298,18 +295,13 @@
return reachable
def reachableroots(repo, roots, heads, includepath=False):
- """return (heads(::<roots> and ::<heads>))
-
- If includepath is True, return (<roots>::<heads>)."""
+ """See revlog.reachableroots"""
if not roots:
return baseset()
minroot = roots.min()
roots = list(roots)
heads = list(heads)
- try:
- revs = repo.changelog.reachableroots(minroot, heads, roots, includepath)
- except AttributeError:
- revs = _reachablerootspure(repo, minroot, roots, heads, includepath)
+ revs = repo.changelog.reachableroots(minroot, heads, roots, includepath)
revs = baseset(revs)
revs.sort()
return revs
--- a/mercurial/debugcommands.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/debugcommands.py Mon Jul 22 14:00:33 2019 -0400
@@ -1240,7 +1240,7 @@
# Python
fm.write('pythonexe', _("checking Python executable (%s)\n"),
- pycompat.sysexecutable)
+ pycompat.sysexecutable or _("unknown"))
fm.write('pythonver', _("checking Python version (%s)\n"),
("%d.%d.%d" % sys.version_info[:3]))
fm.write('pythonlib', _("checking Python lib (%s)...\n"),
@@ -1278,16 +1278,28 @@
fm.write('hgmodules', _("checking installed modules (%s)...\n"),
os.path.dirname(pycompat.fsencode(__file__)))
- if policy.policy in ('c', 'allow'):
+ rustandc = policy.policy in ('rust+c', 'rust+c-allow')
+ rustext = rustandc # for now, that's the only case
+ cext = policy.policy in ('c', 'allow') or rustandc
+ nopure = cext or rustext
+ if nopure:
err = None
try:
- from .cext import (
- base85,
- bdiff,
- mpatch,
- osutil,
- )
- dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
+ if cext:
+ from .cext import (
+ base85,
+ bdiff,
+ mpatch,
+ osutil,
+ )
+ # quiet pyflakes
+ dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
+ if rustext:
+ from .rustext import (
+ ancestor,
+ dirstate,
+ )
+ dir(ancestor), dir(dirstate) # quiet pyflakes
except Exception as inst:
err = stringutil.forcebytestr(inst)
problems += 1
--- a/mercurial/dirstate.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/dirstate.py Mon Jul 22 14:00:33 2019 -0400
@@ -28,6 +28,7 @@
)
parsers = policy.importmod(r'parsers')
+dirstatemod = policy.importrust(r'dirstate', default=parsers)
propertycache = util.propertycache
filecache = scmutil.filecache
@@ -390,12 +391,24 @@
self._updatedfiles.add(f)
self._map.addfile(f, oldstate, state, mode, size, mtime)
- def normal(self, f):
- '''Mark a file normal and clean.'''
- s = os.lstat(self._join(f))
- mtime = s[stat.ST_MTIME]
- self._addpath(f, 'n', s.st_mode,
- s.st_size & _rangemask, mtime & _rangemask)
+ def normal(self, f, parentfiledata=None):
+ '''Mark a file normal and clean.
+
+ parentfiledata: (mode, size, mtime) of the clean file
+
+ parentfiledata should be computed from memory (for mode,
+ size), as or close as possible from the point where we
+ determined the file was clean, to limit the risk of the
+ file having been changed by an external process between the
+ moment where the file was determined to be clean and now.'''
+ if parentfiledata:
+ (mode, size, mtime) = parentfiledata
+ else:
+ s = os.lstat(self._join(f))
+ mode = s.st_mode
+ size = s.st_size
+ mtime = s[stat.ST_MTIME]
+ self._addpath(f, 'n', mode, size & _rangemask, mtime & _rangemask)
self._map.copymap.pop(f, None)
if f in self._map.nonnormalset:
self._map.nonnormalset.remove(f)
@@ -656,8 +669,6 @@
self._dirty = False
def _dirignore(self, f):
- if f == '.':
- return False
if self._ignore(f):
return True
for p in util.finddirs(f):
@@ -751,15 +762,16 @@
del files[i]
j += 1
- if not files or '.' in files:
- files = ['.']
+ if not files or '' in files:
+ files = ['']
+ # constructing the foldmap is expensive, so don't do it for the
+ # common case where files is ['']
+ normalize = None
results = dict.fromkeys(subrepos)
results['.hg'] = None
for ff in files:
- # constructing the foldmap is expensive, so don't do it for the
- # common case where files is ['.']
- if normalize and ff != '.':
+ if normalize:
nf = normalize(ff, False, True)
else:
nf = ff
@@ -903,9 +915,7 @@
if visitentries == 'this' or visitentries == 'all':
visitentries = None
skip = None
- if nd == '.':
- nd = ''
- else:
+ if nd != '':
skip = '.hg'
try:
entries = listdir(join(nd), stat=True, skip=skip)
@@ -1465,7 +1475,7 @@
# parsing the dirstate.
#
# (we cannot decorate the function directly since it is in a C module)
- parse_dirstate = util.nogc(parsers.parse_dirstate)
+ parse_dirstate = util.nogc(dirstatemod.parse_dirstate)
p = parse_dirstate(self._map, self.copymap, st)
if not self._dirtyparents:
self.setparents(*p)
@@ -1476,8 +1486,8 @@
self.get = self._map.get
def write(self, st, now):
- st.write(parsers.pack_dirstate(self._map, self.copymap,
- self.parents(), now))
+ st.write(dirstatemod.pack_dirstate(self._map, self.copymap,
+ self.parents(), now))
st.close()
self._dirtyparents = False
self.nonnormalset, self.otherparentset = self.nonnormalentries()
--- a/mercurial/discovery.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/discovery.py Mon Jul 22 14:00:33 2019 -0400
@@ -343,10 +343,19 @@
# 1. Check for new branches on the remote.
if newbranches and not newbranch: # new branch requires --new-branch
branchnames = ', '.join(sorted(newbranches))
- raise error.Abort(_("push creates new remote branches: %s!")
- % branchnames,
- hint=_("use 'hg push --new-branch' to create"
- " new remote branches"))
+ # Calculate how many of the new branches are closed branches
+ closedbranches = set()
+ for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
+ if isclosed:
+ closedbranches.add(tag)
+ closedbranches = (closedbranches & set(newbranches))
+ if closedbranches:
+ errmsg = (_("push creates new remote branches: %s (%d closed)!")
+ % (branchnames, len(closedbranches)))
+ else:
+ errmsg = (_("push creates new remote branches: %s!")% branchnames)
+ hint=_("use 'hg push --new-branch' to create new remote branches")
+ raise error.Abort(errmsg, hint=hint)
# 2. Find heads that we need not warn about
nowarnheads = _nowarnheads(pushop)
--- a/mercurial/exchange.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/exchange.py Mon Jul 22 14:00:33 2019 -0400
@@ -539,10 +539,12 @@
# get lock as we might write phase data
wlock = lock = None
try:
- # bundle2 push may receive a reply bundle touching bookmarks or other
- # things requiring the wlock. Take it now to ensure proper ordering.
+ # bundle2 push may receive a reply bundle touching bookmarks
+ # requiring the wlock. Take it now to ensure proper ordering.
maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
- if (not _forcebundle1(pushop)) and maypushback:
+ if ((not _forcebundle1(pushop)) and
+ maypushback and
+ not bookmod.bookmarksinstore(repo)):
wlock = pushop.repo.wlock()
lock = pushop.repo.lock()
pushop.trmanager = transactionmanager(pushop.repo,
@@ -1548,7 +1550,10 @@
raise error.Abort(msg)
pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
- with repo.wlock(), repo.lock(), pullop.trmanager:
+ wlock = util.nullcontextmanager()
+ if not bookmod.bookmarksinstore(repo):
+ wlock = repo.wlock()
+ with wlock, repo.lock(), pullop.trmanager:
# Use the modern wire protocol, if available.
if remote.capable('command-changesetdata'):
exchangev2.pull(pullop)
@@ -2395,7 +2400,8 @@
try:
def gettransaction():
if not lockandtr[2]:
- lockandtr[0] = repo.wlock()
+ if not bookmod.bookmarksinstore(repo):
+ lockandtr[0] = repo.wlock()
lockandtr[1] = repo.lock()
lockandtr[2] = repo.transaction(source)
lockandtr[2].hookargs['source'] = source
--- a/mercurial/extensions.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/extensions.py Mon Jul 22 14:00:33 2019 -0400
@@ -43,7 +43,8 @@
'progress',
'interhg',
'inotify',
- 'hgcia'
+ 'hgcia',
+ 'shelve',
}
def extensions(ui=None):
@@ -221,14 +222,7 @@
extsetup = getattr(_extensions[name], 'extsetup', None)
if extsetup:
try:
- try:
- extsetup(ui)
- except TypeError:
- if pycompat.getargspec(extsetup).args:
- raise
- ui.deprecwarn("extsetup for '%s' must take a ui argument"
- % name, "4.9")
- extsetup() # old extsetup with no ui argument
+ extsetup(ui)
except Exception as inst:
ui.traceback(force=True)
msg = stringutil.forcebytestr(inst)
--- a/mercurial/exthelper.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/exthelper.py Mon Jul 22 14:00:33 2019 -0400
@@ -15,9 +15,12 @@
commands,
error,
extensions,
+ pycompat,
registrar,
)
+from hgdemandimport import tracing
+
class exthelper(object):
"""Helper for modular extension setup
@@ -135,7 +138,8 @@
for cont, funcname, wrapper in self._functionwrappers:
extensions.wrapfunction(cont, funcname, wrapper)
for c in self._uicallables:
- c(ui)
+ with tracing.log(b'finaluisetup: %s', pycompat.sysbytes(repr(c))):
+ c(ui)
def finaluipopulate(self, ui):
"""Method to be used as the extension uipopulate
@@ -175,7 +179,8 @@
entry[1].append(opt)
for c in self._extcallables:
- c(ui)
+ with tracing.log(b'finalextsetup: %s', pycompat.sysbytes(repr(c))):
+ c(ui)
def finalreposetup(self, ui, repo):
"""Method to be used as the extension reposetup
@@ -187,7 +192,8 @@
- Changes to repo.__class__, repo.dirstate.__class__
"""
for c in self._repocallables:
- c(ui, repo)
+ with tracing.log(b'finalreposetup: %s', pycompat.sysbytes(repr(c))):
+ c(ui, repo)
def uisetup(self, call):
"""Decorated function will be executed during uisetup
--- a/mercurial/filemerge.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/filemerge.py Mon Jul 22 14:00:33 2019 -0400
@@ -60,17 +60,20 @@
mergeonly = internaltool.mergeonly # just the full merge, no premerge
fullmerge = internaltool.fullmerge # both premerge and merge
+# IMPORTANT: keep the last line of this prompt very short ("What do you want to
+# do?") because of issue6158, ideally to <40 English characters (to allow other
+# languages that may take more columns to still have a chance to fit in an
+# 80-column screen).
_localchangedotherdeletedmsg = _(
"file '%(fd)s' was deleted in other%(o)s but was modified in local%(l)s.\n"
- "What do you want to do?\n"
- "use (c)hanged version, (d)elete, or leave (u)nresolved?"
+ "You can use (c)hanged version, (d)elete, or leave (u)nresolved.\n"
+ "What do you want to do?"
"$$ &Changed $$ &Delete $$ &Unresolved")
_otherchangedlocaldeletedmsg = _(
"file '%(fd)s' was deleted in local%(l)s but was modified in other%(o)s.\n"
- "What do you want to do?\n"
- "use (c)hanged version, leave (d)eleted, or "
- "leave (u)nresolved?"
+ "You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.\n"
+ "What do you want to do?"
"$$ &Changed $$ &Deleted $$ &Unresolved")
class absentfilectx(object):
@@ -299,9 +302,14 @@
_otherchangedlocaldeletedmsg % prompts, 2)
choice = ['other', 'local', 'unresolved'][index]
else:
+ # IMPORTANT: keep the last line of this prompt ("What do you want to
+ # do?") very short, see comment next to _localchangedotherdeletedmsg
+ # at the top of the file for details.
index = ui.promptchoice(
- _("keep (l)ocal%(l)s, take (o)ther%(o)s, or leave (u)nresolved"
- " for %(fd)s?"
+ _("file '%(fd)s' needs to be resolved.\n"
+ "You can keep (l)ocal%(l)s, take (o)ther%(o)s, or leave "
+ "(u)nresolved.\n"
+ "What do you want to do?"
"$$ &Local $$ &Other $$ &Unresolved") % prompts, 2)
choice = ['local', 'other', 'unresolved'][index]
--- a/mercurial/graphmod.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/graphmod.py Mon Jul 22 14:00:33 2019 -0400
@@ -469,22 +469,6 @@
while len(text) < len(lines):
text.append("")
- if any(len(char) > 1 for char in edgemap.values()):
- # limit drawing an edge to the first or last N lines of the current
- # section the rest of the edge is drawn like a parent line.
- parent = state['styles'][PARENT][-1:]
- def _drawgp(char, i):
- # should a grandparent character be drawn for this line?
- if len(char) < 2:
- return True
- num = int(char[:-1])
- # either skip first num lines or take last num lines, based on sign
- return -num <= i if num < 0 else (len(lines) - i) <= num
- for i, line in enumerate(lines):
- line[:] = [c[-1:] if _drawgp(c, i) else parent for c in line]
- edgemap.update(
- (e, (c if len(c) < 2 else parent)) for e, c in edgemap.items())
-
# print lines
indentation_level = max(ncols, ncols + coldiff)
lines = ["%-*s " % (2 * indentation_level, "".join(line)) for line in lines]
--- a/mercurial/hbisect.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/hbisect.py Mon Jul 22 14:00:33 2019 -0400
@@ -32,6 +32,7 @@
if searching for a first bad one.
"""
+ repo = repo.unfiltered()
changelog = repo.changelog
clparents = changelog.parentrevs
skip = {changelog.rev(n) for n in state['skip']}
@@ -139,7 +140,7 @@
state = {'current': [], 'good': [], 'bad': [], 'skip': []}
for l in repo.vfs.tryreadlines("bisect.state"):
kind, node = l[:-1].split()
- node = repo.lookup(node)
+ node = repo.unfiltered().lookup(node)
if kind not in state:
raise error.Abort(_("unknown bisect kind %s") % kind)
state[kind].append(node)
@@ -184,7 +185,7 @@
"""
state = load_state(repo)
if status in ('good', 'bad', 'skip', 'current'):
- return map(repo.changelog.rev, state[status])
+ return map(repo.unfiltered().changelog.rev, state[status])
else:
# In the following sets, we do *not* call 'bisect()' with more
# than one level of recursion, because that can be very, very
@@ -268,6 +269,7 @@
return None
def printresult(ui, repo, state, displayer, nodes, good):
+ repo = repo.unfiltered()
if len(nodes) == 1:
# narrowed it down to a single revision
if good:
--- a/mercurial/help.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/help.py Mon Jul 22 14:00:33 2019 -0400
@@ -320,6 +320,8 @@
loaddoc('config', subdir='internals')),
(['extensions', 'extension'], _('Extension API'),
loaddoc('extensions', subdir='internals')),
+ (['mergestate'], _('Mergestate'),
+ loaddoc('mergestate', subdir='internals')),
(['requirements'], _('Repository Requirements'),
loaddoc('requirements', subdir='internals')),
(['revlogs'], _('Revision Logs'),
@@ -453,7 +455,7 @@
addtopichook('config', inserttweakrc)
def help_(ui, commands, name, unknowncmd=False, full=True, subtopic=None,
- **opts):
+ fullname=None, **opts):
'''
Generate the help for 'name' as unformatted restructured text. If
'name' is None, describe the commands available.
@@ -689,6 +691,8 @@
for names, header, doc in subtopics[name]:
if subtopic in names:
break
+ if not any(subtopic in s[0] for s in subtopics[name]):
+ raise error.UnknownCommand(name)
if not header:
for topic in helptable:
@@ -812,8 +816,16 @@
if unknowncmd:
raise error.UnknownCommand(name)
else:
- msg = _('no such help topic: %s') % name
- hint = _("try 'hg help --keyword %s'") % name
+ if fullname:
+ formatname = fullname
+ else:
+ formatname = name
+ if subtopic:
+ hintname = subtopic
+ else:
+ hintname = name
+ msg = _('no such help topic: %s') % formatname
+ hint = _("try 'hg help --keyword %s'") % hintname
raise error.Abort(msg, hint=hint)
else:
# program name
@@ -848,7 +860,7 @@
termwidth = ui.termwidth() - 2
if textwidth <= 0 or termwidth < textwidth:
textwidth = termwidth
- text = help_(ui, commands, name,
+ text = help_(ui, commands, name, fullname=fullname,
subtopic=subtopic, unknowncmd=unknowncmd, full=full, **opts)
blocks, pruned = minirst.parse(text, keep=keep)
--- a/mercurial/help/config.txt Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/help/config.txt Mon Jul 22 14:00:33 2019 -0400
@@ -438,6 +438,10 @@
``commands``
------------
+``commit.post-status``
+ Show status of files in the working directory after successful commit.
+ (default: False)
+
``resolve.confirm``
Confirm before performing action if no filename is passed.
(default: False)
@@ -875,6 +879,15 @@
On some system, Mercurial installation may lack `zstd` supports. Default is `zlib`.
+``bookmarks-in-store``
+ Store bookmarks in .hg/store/. This means that bookmarks are shared when
+ using `hg share` regardless of the `-B` option.
+
+ Repositories with this on-disk format require Mercurial version 5.1.
+
+ Disabled by default.
+
+
``graph``
---------
@@ -1767,6 +1780,11 @@
The option is unused on other formats.
+``showtime``
+ Show time taken as absolute durations, in addition to percentages.
+ Only used by the ``hotpath`` format.
+ (default: true)
+
``progress``
------------
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/help/internals/mergestate.txt Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,80 @@
+The active mergestate is stored in ``.hg/merge`` when a merge is triggered
+by commands like ``hg merge``, ``hg rebase``, etc. until the merge is
+completed or aborted to track the 3-way merge state of individual files.
+
+The contents of the directory are:
+
+Conflicting files
+-----------------
+
+The local version of the conflicting files are stored with their
+filenames as the hash of their paths.
+
+state
+-----
+
+This mergestate file record is used by hg versions prior to 2.9.1
+and contains less data than ``state2``. If there is no contradiction
+with ``state2``, we can assume that both are written at the same time.
+In this case, data from ``state2`` is used. Otherwise, we use ``state``.
+We read/write both ``state`` and ``state2`` records to ensure backward
+compatibility.
+
+state2
+------
+
+This record stores a superset of data in ``state``, including new kinds
+of records in the future.
+
+Each record can contain arbitrary content and has an associated type. This
+`type` should be a letter. If `type` is uppercase, the record is mandatory:
+versions of Mercurial that don't support it should abort. If `type` is
+lowercase, the record can be safely ignored.
+
+Currently known records:
+
+| * L: the node of the "local" part of the merge (hexified version)
+| * O: the node of the "other" part of the merge (hexified version)
+| * F: a file to be merged entry
+| * C: a change/delete or delete/change conflict
+| * D: a file that the external merge driver will merge internally
+| (experimental)
+| * P: a path conflict (file vs directory)
+| * m: the external merge driver defined for this merge plus its run state
+| (experimental)
+| * f: a (filename, dictionary) tuple of optional values for a given file
+| * X: unsupported mandatory record type (used in tests)
+| * x: unsupported advisory record type (used in tests)
+| * l: the labels for the parts of the merge.
+
+Merge driver run states (experimental):
+
+| * u: driver-resolved files unmarked -- needs to be run next time we're
+| about to resolve or commit
+| * m: driver-resolved files marked -- only needs to be run before commit
+| * s: success/skipped -- does not need to be run any more
+
+Merge record states (indexed by filename):
+
+| * u: unresolved conflict
+| * r: resolved conflict
+| * pu: unresolved path conflict (file conflicts with directory)
+| * pr: resolved path conflict
+| * d: driver-resolved conflict
+
+The resolve command transitions between 'u' and 'r' for conflicts and
+'pu' and 'pr' for path conflicts.
+
+This format is a list of arbitrary records of the form:
+
+[type][length][content]
+
+`type` is a single character, `length` is a 4 byte integer, and
+`content` is an arbitrary byte sequence of length `length`.
+
+Mercurial versions prior to 3.7 have a bug where if there are
+unsupported mandatory merge records, attempting to clear out the merge
+state with hg update --clean or similar aborts. The 't' record type
+works around that by writing out what those versions treat as an
+advisory record, but later versions interpret as special: the first
+character is the 'real' record type and everything onwards is the data.
--- a/mercurial/help/internals/requirements.txt Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/help/internals/requirements.txt Mon Jul 22 14:00:33 2019 -0400
@@ -129,3 +129,16 @@
disappear in a future Mercurial release. The requirement will only
be present on repositories that have opted in to a sparse working
directory.
+
+bookmarksinstore
+================
+
+Bookmarks are stored in ``.hg/store/`` instead of directly in ``.hg/``
+where they used to be stored. The active bookmark is still stored
+directly in ``.hg/``. This makes them always shared by ``hg share``,
+whether or not ``-B`` was passed.
+
+Support for this requirement was added in Mercurial 5.1 (released
+August 2019). The requirement will only be present on repositories
+that have opted in to this format (by having
+``format.bookmarks-in-store=true`` set when they were created).
--- a/mercurial/help/internals/revlogs.txt Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/help/internals/revlogs.txt Mon Jul 22 14:00:33 2019 -0400
@@ -28,8 +28,8 @@
===========
A revlog begins with a 32-bit big endian integer holding version info
-and feature flags. This integer is shared with the first revision
-entry.
+and feature flags. This integer overlaps with the first four bytes of
+the first revision entry.
This integer is logically divided into 2 16-bit shorts. The least
significant half of the integer is the format/version short. The other
@@ -78,10 +78,10 @@
00 03 00 01
v1 + inline + generaldelta
-Following the 32-bit header is the remainder of the first index entry.
-Following that are remaining *index* data. Inlined revision data is
-possibly located between index entries. More on this layout is described
-below.
+Following the 32-bit header is the remaining 60 bytes of the first index
+entry. Following that are additional *index* entries. Inlined revision
+data is possibly located between index entries. More on this inlined
+layout is described below.
Version 1 Format
================
@@ -149,8 +149,12 @@
separate byte container. The offsets from bytes 0-5 and the compressed
length from bytes 8-11 define how to access this data.
-The first 4 bytes of the revlog are shared between the revlog header
-and the 6 byte absolute offset field from the first revlog entry.
+The 6 byte absolute offset field from the first revlog entry overlaps
+with the revlog header. That is, the first 6 bytes of the first revlog
+entry can be split into four bytes containing the header for the revlog
+file and an additional two bytes containing the offset for the first
+entry. Since this is the offset from the beginning of the file for the
+first revision entry, the two bytes will always be set to zero.
Version 2 Format
================
--- a/mercurial/hg.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/hg.py Mon Jul 22 14:00:33 2019 -0400
@@ -956,31 +956,34 @@
abort=False):
"""Branch merge with node, resolving changes. Return true if any
unresolved conflicts."""
- if not abort:
- stats = mergemod.update(repo, node, branchmerge=True, force=force,
- mergeforce=mergeforce, labels=labels)
- else:
- ms = mergemod.mergestate.read(repo)
- if ms.active():
- # there were conflicts
- node = ms.localctx.hex()
- else:
- # there were no conficts, mergestate was not stored
- node = repo['.'].hex()
+ if abort:
+ return abortmerge(repo.ui, repo)
- repo.ui.status(_("aborting the merge, updating back to"
- " %s\n") % node[:12])
- stats = mergemod.update(repo, node, branchmerge=False, force=True,
- labels=labels)
-
+ stats = mergemod.update(repo, node, branchmerge=True, force=force,
+ mergeforce=mergeforce, labels=labels)
_showstats(repo, stats)
if stats.unresolvedcount:
repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
"or 'hg merge --abort' to abandon\n"))
- elif remind and not abort:
+ elif remind:
repo.ui.status(_("(branch merge, don't forget to commit)\n"))
return stats.unresolvedcount > 0
+def abortmerge(ui, repo):
+ ms = mergemod.mergestate.read(repo)
+ if ms.active():
+ # there were conflicts
+ node = ms.localctx.hex()
+ else:
+ # there were no conflicts, mergestate was not stored
+ node = repo['.'].hex()
+
+ repo.ui.status(_("aborting the merge, updating back to"
+ " %s\n") % node[:12])
+ stats = mergemod.update(repo, node, branchmerge=False, force=True)
+ _showstats(repo, stats)
+ return stats.unresolvedcount > 0
+
def _incoming(displaychlist, subreporecurse, ui, repo, source,
opts, buffered=False):
"""
@@ -1092,9 +1095,9 @@
recurse()
return 0 # exit code is zero since we found outgoing changes
-def verify(repo):
+def verify(repo, level=None):
"""verify the consistency of a repository"""
- ret = verifymod.verify(repo)
+ ret = verifymod.verify(repo, level=level)
# Broken subrepo references in hidden csets don't seem worth worrying about,
# since they can't be pushed/pulled, and --hidden can be used if they are a
--- a/mercurial/hgweb/__init__.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/hgweb/__init__.py Mon Jul 22 14:00:33 2019 -0400
@@ -38,6 +38,9 @@
- list of virtual:real tuples (multi-repo view)
'''
+ if isinstance(config, pycompat.unicode):
+ raise error.ProgrammingError(
+ 'Mercurial only supports encoded strings: %r' % config)
if ((isinstance(config, bytes) and not os.path.isdir(config)) or
isinstance(config, dict) or isinstance(config, list)):
# create a multi-dir interface
--- a/mercurial/hgweb/hgwebdir_mod.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/hgweb/hgwebdir_mod.py Mon Jul 22 14:00:33 2019 -0400
@@ -414,14 +414,10 @@
return self.makeindex(req, res, tmpl, subdir)
def _virtualdirs():
- # Check the full virtual path, each parent, and the root ('')
- if virtual != '':
- yield virtual
-
- for p in util.finddirs(virtual):
- yield p
-
- yield ''
+ # Check the full virtual path, and each parent
+ yield virtual
+ for p in util.finddirs(virtual):
+ yield p
for virtualrepo in _virtualdirs():
real = repos.get(virtualrepo)
--- a/mercurial/hgweb/webutil.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/hgweb/webutil.py Mon Jul 22 14:00:33 2019 -0400
@@ -409,12 +409,6 @@
whyunstable._requires = {'repo', 'ctx'}
-# helper to mark a function as a new-style template keyword; can be removed
-# once old-style function gets unsupported and new-style becomes the default
-def _kwfunc(f):
- f._requires = ()
- return f
-
def commonentry(repo, ctx):
node = scmutil.binnode(ctx)
return {
@@ -439,8 +433,8 @@
'branches': nodebranchdict(repo, ctx),
'tags': nodetagsdict(repo, node),
'bookmarks': nodebookmarksdict(repo, node),
- 'parent': _kwfunc(lambda context, mapping: parents(ctx)),
- 'child': _kwfunc(lambda context, mapping: children(ctx)),
+ 'parent': lambda context, mapping: parents(ctx),
+ 'child': lambda context, mapping: children(ctx),
}
def changelistentry(web, ctx):
@@ -457,9 +451,9 @@
entry = commonentry(repo, ctx)
entry.update({
- 'allparents': _kwfunc(lambda context, mapping: parents(ctx)),
- 'parent': _kwfunc(lambda context, mapping: parents(ctx, rev - 1)),
- 'child': _kwfunc(lambda context, mapping: children(ctx, rev + 1)),
+ 'allparents': lambda context, mapping: parents(ctx),
+ 'parent': lambda context, mapping: parents(ctx, rev - 1),
+ 'child': lambda context, mapping: children(ctx, rev + 1),
'changelogtag': showtags,
'files': files,
})
@@ -529,7 +523,7 @@
changesetbranch=showbranch,
files=templateutil.mappedgenerator(_listfilesgen,
args=(ctx, web.stripecount)),
- diffsummary=_kwfunc(lambda context, mapping: diffsummary(diffstatsgen)),
+ diffsummary=lambda context, mapping: diffsummary(diffstatsgen),
diffstat=diffstats,
archives=web.archivelist(ctx.hex()),
**pycompat.strkwargs(commonentry(web.repo, ctx)))
--- a/mercurial/httppeer.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/httppeer.py Mon Jul 22 14:00:33 2019 -0400
@@ -382,6 +382,7 @@
self._path = path
self._url = url
self._caps = caps
+ self.limitedarguments = caps is not None and 'httppostargs' not in caps
self._urlopener = opener
self._requestbuilder = requestbuilder
@@ -750,6 +751,9 @@
@interfaceutil.implementer(repository.ipeerv2)
class httpv2peer(object):
+
+ limitedarguments = False
+
def __init__(self, ui, repourl, apipath, opener, requestbuilder,
apidescriptor):
self.ui = ui
--- a/mercurial/localrepo.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/localrepo.py Mon Jul 22 14:00:33 2019 -0400
@@ -128,8 +128,7 @@
# scmutil.filecache only uses the path for passing back into our
# join(), so we can safely pass a list of paths and locations
super(mixedrepostorecache, self).__init__(*pathsandlocations)
- for path, location in pathsandlocations:
- _cachedfiles.update(pathsandlocations)
+ _cachedfiles.update(pathsandlocations)
def join(self, obj, fnameandlocation):
fname, location = fnameandlocation
@@ -910,6 +909,7 @@
'treemanifest',
REVLOGV2_REQUIREMENT,
SPARSEREVLOG_REQUIREMENT,
+ bookmarks.BOOKMARKS_IN_STORE_REQUIREMENT,
}
_basesupported = supportedformats | {
'store',
@@ -1069,6 +1069,8 @@
# Signature to cached matcher instance.
self._sparsematchercache = {}
+ self._extrafilterid = repoview.extrafilter(ui)
+
def _getvfsward(self, origfunc):
"""build a ward for self.vfs"""
rref = weakref.ref(self)
@@ -1216,11 +1218,14 @@
In other word, there is always only one level of `repoview` "filtering".
"""
+ if self._extrafilterid is not None and '%' not in name:
+ name = name + '%' + self._extrafilterid
+
cls = repoview.newtype(self.unfiltered().__class__)
return cls(self, name, visibilityexceptions)
@mixedrepostorecache(('bookmarks', 'plain'), ('bookmarks.current', 'plain'),
- ('00changelog.i', ''))
+ ('bookmarks', ''), ('00changelog.i', ''))
def _bookmarks(self):
return bookmarks.bmstore(self)
@@ -1982,7 +1987,7 @@
(self.vfs, 'journal.dirstate'),
(self.vfs, 'journal.branch'),
(self.vfs, 'journal.desc'),
- (self.vfs, 'journal.bookmarks'),
+ (bookmarks.bookmarksvfs(self), 'journal.bookmarks'),
(self.svfs, 'journal.phaseroots'))
def undofiles(self):
@@ -1997,8 +2002,9 @@
encoding.fromlocal(self.dirstate.branch()))
self.vfs.write("journal.desc",
"%d\n%s\n" % (len(self), desc))
- self.vfs.write("journal.bookmarks",
- self.vfs.tryread("bookmarks"))
+ bookmarksvfs = bookmarks.bookmarksvfs(self)
+ bookmarksvfs.write("journal.bookmarks",
+ bookmarksvfs.tryread("bookmarks"))
self.svfs.write("journal.phaseroots",
self.svfs.tryread("phaseroots"))
@@ -2068,8 +2074,9 @@
vfsmap = {'plain': self.vfs, '': self.svfs}
transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn,
checkambigfiles=_cachedfiles)
- if self.vfs.exists('undo.bookmarks'):
- self.vfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
+ bookmarksvfs = bookmarks.bookmarksvfs(self)
+ if bookmarksvfs.exists('undo.bookmarks'):
+ bookmarksvfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
if self.svfs.exists('undo.phaseroots'):
self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True)
self.invalidate()
@@ -2152,6 +2159,8 @@
for ctx in self['.'].parents():
ctx.manifest() # accessing the manifest is enough
+ # accessing fnode cache warms the cache
+ tagsmod.fnoderevs(self.ui, unfi, unfi.changelog.revs())
# accessing tags warm the cache
self.tags()
self.filtered('served').tags()
@@ -2362,7 +2371,10 @@
node = fctx.filenode()
if node in [fparent1, fparent2]:
self.ui.debug('reusing %s filelog entry\n' % fname)
- if manifest1.flags(fname) != fctx.flags():
+ if ((fparent1 != nullid and
+ manifest1.flags(fname) != fctx.flags()) or
+ (fparent2 != nullid and
+ manifest2.flags(fname) != fctx.flags())):
changelist.append(fname)
return node
@@ -2556,17 +2568,17 @@
_('note: commit message saved in %s\n') % msgfn)
raise
- def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
+ def commithook():
# hack for command that use a temporary commit (eg: histedit)
# temporary commit got stripped before hook release
if self.changelog.hasnode(ret):
- self.hook("commit", node=node, parent1=parent1,
- parent2=parent2)
+ self.hook("commit", node=hex(ret), parent1=hookp1,
+ parent2=hookp2)
self._afterlock(commithook)
return ret
@unfilteredmethod
- def commitctx(self, ctx, error=False):
+ def commitctx(self, ctx, error=False, origctx=None):
"""Add a new revision to current repository.
Revision information is passed via the context argument.
@@ -2574,6 +2586,12 @@
modified/added/removed files. On merge, it may be wider than the
ctx.files() to be committed, since any file nodes derived directly
from p1 or p2 are excluded from the committed ctx.files().
+
+ origctx is for convert to work around the problem that bug
+ fixes to the files list in changesets change hashes. For
+ convert to be the identity, it can pass an origctx and this
+ function will use the same files list when it makes sense to
+ do so.
"""
p1, p2 = ctx.p1(), ctx.p2()
@@ -2581,10 +2599,13 @@
writecopiesto = self.ui.config('experimental', 'copies.write-to')
writefilecopymeta = writecopiesto != 'changeset-only'
+ writechangesetcopy = (writecopiesto in
+ ('changeset-only', 'compatibility'))
p1copies, p2copies = None, None
- if writecopiesto in ('changeset-only', 'compatibility'):
+ if writechangesetcopy:
p1copies = ctx.p1copies()
p2copies = ctx.p2copies()
+ filesadded, filesremoved = None, None
with self.lock(), self.transaction("commit") as tr:
trp = weakref.proxy(tr)
@@ -2593,6 +2614,9 @@
self.ui.debug('reusing known manifest\n')
mn = ctx.manifestnode()
files = ctx.files()
+ if writechangesetcopy:
+ filesadded = ctx.filesadded()
+ filesremoved = ctx.filesremoved()
elif ctx.files():
m1ctx = p1.manifestctx()
m2ctx = p2.manifestctx()
@@ -2633,10 +2657,51 @@
raise
# update manifest
- removed = [f for f in sorted(removed) if f in m1 or f in m2]
- drop = [f for f in removed if f in m]
+ removed = [f for f in removed if f in m1 or f in m2]
+ drop = sorted([f for f in removed if f in m])
for f in drop:
del m[f]
+ if p2.rev() != nullrev:
+ @util.cachefunc
+ def mas():
+ p1n = p1.node()
+ p2n = p2.node()
+ cahs = self.changelog.commonancestorsheads(p1n, p2n)
+ if not cahs:
+ cahs = [nullrev]
+ return [self[r].manifest() for r in cahs]
+ def deletionfromparent(f):
+ # When a file is removed relative to p1 in a merge, this
+ # function determines whether the absence is due to a
+ # deletion from a parent, or whether the merge commit
+ # itself deletes the file. We decide this by doing a
+ # simplified three way merge of the manifest entry for
+ # the file. There are two ways we decide the merge
+ # itself didn't delete a file:
+ # - neither parent (nor the merge) contain the file
+ # - exactly one parent contains the file, and that
+ # parent has the same filelog entry as the merge
+ # ancestor (or all of them if there two). In other
+ # words, that parent left the file unchanged while the
+ # other one deleted it.
+ # One way to think about this is that deleting a file is
+ # similar to emptying it, so the list of changed files
+ # should be similar either way. The computation
+ # described above is not done directly in _filecommit
+ # when creating the list of changed files, however
+ # it does something very similar by comparing filelog
+ # nodes.
+ if f in m1:
+ return (f not in m2
+ and all(f in ma and ma.find(f) == m1.find(f)
+ for ma in mas()))
+ elif f in m2:
+ return all(f in ma and ma.find(f) == m2.find(f)
+ for ma in mas())
+ else:
+ return True
+ removed = [f for f in removed if not deletionfromparent(f)]
+
files = changed + removed
md = None
if not files:
@@ -2659,8 +2724,13 @@
mn = mctx.write(trp, linkrev,
p1.manifestnode(), p2.manifestnode(),
added, drop, match=self.narrowmatch())
+
+ if writechangesetcopy:
+ filesadded = [f for f in changed
+ if not (f in m1 or f in m2)]
+ filesremoved = removed
else:
- self.ui.debug('reusing manifest form p1 (listed files '
+ self.ui.debug('reusing manifest from p1 (listed files '
'actually unchanged)\n')
mn = p1.manifestnode()
else:
@@ -2668,13 +2738,26 @@
mn = p1.manifestnode()
files = []
+ if writecopiesto == 'changeset-only':
+ # If writing only to changeset extras, use None to indicate that
+ # no entry should be written. If writing to both, write an empty
+ # entry to prevent the reader from falling back to reading
+ # filelogs.
+ p1copies = p1copies or None
+ p2copies = p2copies or None
+ filesadded = filesadded or None
+ filesremoved = filesremoved or None
+
+ if origctx and origctx.manifestnode() == mn:
+ files = origctx.files()
+
# update changelog
self.ui.note(_("committing changelog\n"))
self.changelog.delayupdate(tr)
n = self.changelog.add(mn, files, ctx.description(),
trp, p1.node(), p2.node(),
user, ctx.date(), ctx.extra().copy(),
- p1copies, p2copies)
+ p1copies, p2copies, filesadded, filesremoved)
xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
parent2=xp2)
@@ -3013,6 +3096,9 @@
if createopts.get('lfs'):
requirements.add('lfs')
+ if ui.configbool('format', 'bookmarks-in-store'):
+ requirements.add(bookmarks.BOOKMARKS_IN_STORE_REQUIREMENT)
+
return requirements
def filterknowncreateopts(ui, createopts):
--- a/mercurial/logcmdutil.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/logcmdutil.py Mon Jul 22 14:00:33 2019 -0400
@@ -743,10 +743,15 @@
return match
expr = _makerevset(repo, match, pats, slowpath, opts)
- if opts.get('graph') and opts.get('rev'):
+ if opts.get('graph'):
# User-specified revs might be unsorted, but don't sort before
# _makerevset because it might depend on the order of revs
- if not (revs.isdescending() or revs.istopo()):
+ if repo.ui.configbool('experimental', 'log.topo'):
+ if not revs.istopo():
+ revs = dagop.toposort(revs, repo.changelog.parentrevs)
+ # TODO: try to iterate the set lazily
+ revs = revset.baseset(list(revs), istopo=True)
+ elif not (revs.isdescending() or revs.istopo()):
revs.sort(reverse=True)
if expr:
matcher = revset.match(None, expr)
@@ -857,7 +862,7 @@
return templ.renderdefault(props)
return formatnode
-def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None, props=None):
+def displaygraph(ui, repo, dag, displayer, edgefn, getcopies=None, props=None):
props = props or {}
formatnode = _graphnodeformatter(ui, displayer)
state = graphmod.asciistate()
@@ -885,13 +890,7 @@
for rev, type, ctx, parents in dag:
char = formatnode(repo, ctx)
- copies = None
- if getrenamed and ctx.rev():
- copies = []
- for fn in ctx.files():
- rename = getrenamed(fn, ctx.rev())
- if rename:
- copies.append((fn, rename))
+ copies = getcopies(ctx) if getcopies else None
edges = edgefn(type, char, state, rev, parents)
firstedge = next(edges)
width = firstedge[2]
@@ -910,16 +909,10 @@
revdag = graphmod.dagwalker(repo, revs)
displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed)
-def displayrevs(ui, repo, revs, displayer, getrenamed):
+def displayrevs(ui, repo, revs, displayer, getcopies):
for rev in revs:
ctx = repo[rev]
- copies = None
- if getrenamed is not None and rev:
- copies = []
- for fn in ctx.files():
- rename = getrenamed(fn, rev)
- if rename:
- copies.append((fn, rename))
+ copies = getcopies(ctx) if getcopies else None
displayer.show(ctx, copies=copies)
displayer.flush(ctx)
displayer.close()
--- a/mercurial/manifest.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/manifest.py Mon Jul 22 14:00:33 2019 -0400
@@ -506,9 +506,9 @@
if match(fn):
yield fn
- # for dirstate.walk, files=['.'] means "walk the whole tree".
+ # for dirstate.walk, files=[''] means "walk the whole tree".
# follow that here, too
- fset.discard('.')
+ fset.discard('')
for fn in sorted(fset):
if not self.hasdir(fn):
@@ -1078,9 +1078,9 @@
fset.remove(fn)
yield fn
- # for dirstate.walk, files=['.'] means "walk the whole tree".
+ # for dirstate.walk, files=[''] means "walk the whole tree".
# follow that here, too
- fset.discard('.')
+ fset.discard('')
for fn in sorted(fset):
if not self.hasdir(fn):
@@ -1088,7 +1088,7 @@
def _walk(self, match):
'''Recursively generates matching file names for walk().'''
- visit = match.visitchildrenset(self._dir[:-1] or '.')
+ visit = match.visitchildrenset(self._dir[:-1])
if not visit:
return
@@ -1116,7 +1116,7 @@
'''recursively generate a new manifest filtered by the match argument.
'''
- visit = match.visitchildrenset(self._dir[:-1] or '.')
+ visit = match.visitchildrenset(self._dir[:-1])
if visit == 'all':
return self.copy()
ret = treemanifest(self._dir)
@@ -1275,7 +1275,7 @@
return m._dirs.get(d, emptytree)._node
# let's skip investigating things that `match` says we do not need.
- visit = match.visitchildrenset(self._dir[:-1] or '.')
+ visit = match.visitchildrenset(self._dir[:-1])
visit = self._loadchildrensetlazy(visit)
if visit == 'this' or visit == 'all':
visit = None
@@ -1294,7 +1294,7 @@
If `matcher` is provided, it only returns subtrees that match.
"""
- if matcher and not matcher.visitdir(self._dir[:-1] or '.'):
+ if matcher and not matcher.visitdir(self._dir[:-1]):
return
if not matcher or matcher(self._dir[:-1]):
yield self
@@ -1417,6 +1417,10 @@
self.write()
self._read = False
+# an upper bound of what we expect from compression
+# (real live value seems to be "3")
+MAXCOMPRESSION = 3
+
@interfaceutil.implementer(repository.imanifeststorage)
class manifestrevlog(object):
'''A revlog that stores manifest texts. This is responsible for caching the
@@ -1467,7 +1471,8 @@
self._revlog = revlog.revlog(opener, indexfile,
# only root indexfile is cached
checkambig=not bool(tree),
- mmaplargeindex=True)
+ mmaplargeindex=True,
+ upperboundcomp=MAXCOMPRESSION)
self.index = self._revlog.index
self.version = self._revlog.version
@@ -1526,8 +1531,8 @@
_checkforbidden(added)
# combine the changed lists into one sorted iterator
- work = heapq.merge([(x, False) for x in added],
- [(x, True) for x in removed])
+ work = heapq.merge([(x, False) for x in sorted(added)],
+ [(x, True) for x in sorted(removed)])
arraytext, deltatext = m.fastdelta(self.fulltextcache[p1], work)
cachedelta = self._revlog.rev(p1), deltatext
@@ -1725,7 +1730,7 @@
return self._dirmancache[tree][node]
if not self._narrowmatch.always():
- if not self._narrowmatch.visitdir(tree[:-1] or '.'):
+ if not self._narrowmatch.visitdir(tree[:-1]):
return excludeddirmanifestctx(tree, node)
if tree:
if self._rootstore._treeondisk:
@@ -1918,7 +1923,7 @@
def _storage(self):
narrowmatch = self._manifestlog._narrowmatch
if not narrowmatch.always():
- if not narrowmatch.visitdir(self._dir[:-1] or '.'):
+ if not narrowmatch.visitdir(self._dir[:-1]):
return excludedmanifestrevlog(self._dir)
return self._manifestlog.getstorage(self._dir)
--- a/mercurial/match.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/match.py Mon Jul 22 14:00:33 2019 -0400
@@ -17,6 +17,7 @@
encoding,
error,
pathutil,
+ policy,
pycompat,
util,
)
@@ -24,6 +25,8 @@
stringutil,
)
+rustmod = policy.importrust('filepatterns')
+
allpatternkinds = ('re', 'glob', 'path', 'relglob', 'relpath', 'relre',
'rootglob',
'listfile', 'listfile0', 'set', 'include', 'subinclude',
@@ -305,9 +308,6 @@
def __call__(self, fn):
return self.matchfn(fn)
- def __iter__(self):
- for f in self._files:
- yield f
# Callbacks related to how the matcher is used by dirstate.walk.
# Subscribers to these events must monkeypatch the matcher object.
def bad(self, f, msg):
@@ -377,7 +377,7 @@
the following values (assuming the implementation of visitchildrenset
is capable of recognizing this; some implementations are not).
- '.' -> {'foo', 'qux'}
+ '' -> {'foo', 'qux'}
'baz' -> set()
'foo' -> {'bar'}
# Ideally this would be 'all', but since the prefix nature of matchers
@@ -480,11 +480,19 @@
or pycompat.byterepr(self.matchfn))
return '<predicatenmatcher pred=%s>' % s
+def normalizerootdir(dir, funcname):
+ if dir == '.':
+ util.nouideprecwarn("match.%s() no longer accepts "
+ "'.', use '' instead." % funcname, '5.1')
+ return ''
+ return dir
+
+
class patternmatcher(basematcher):
"""Matches a set of (kind, pat, source) against a 'root' directory.
>>> kindpats = [
- ... (b're', b'.*\.c$', b''),
+ ... (b're', br'.*\.c$', b''),
... (b'path', b'foo/a', b''),
... (b'relpath', b'b', b''),
... (b'glob', b'*.h', b''),
@@ -504,7 +512,7 @@
True
>>> m.files()
- ['.', 'foo/a', 'b', '.']
+ ['', 'foo/a', 'b', '']
>>> m.exact(b'foo/a')
True
>>> m.exact(b'b')
@@ -522,13 +530,13 @@
@propertycache
def _dirs(self):
- return set(util.dirs(self._fileset)) | {'.'}
+ return set(util.dirs(self._fileset))
def visitdir(self, dir):
+ dir = normalizerootdir(dir, 'visitdir')
if self._prefix and dir in self._fileset:
return 'all'
- return ('.' in self._fileset or
- dir in self._fileset or
+ return (dir in self._fileset or
dir in self._dirs or
any(parentdir in self._fileset
for parentdir in util.finddirs(dir)))
@@ -561,7 +569,7 @@
addpath(f)
def addpath(self, path):
- if path == '.':
+ if path == '':
return
dirs = self._dirs
findsplitdirs = _dirchildren._findsplitdirs
@@ -575,16 +583,15 @@
# yields (dirname, basename) tuples, walking back to the root. This is
# very similar to util.finddirs, except:
# - produces a (dirname, basename) tuple, not just 'dirname'
- # - includes root dir
# Unlike manifest._splittopdir, this does not suffix `dirname` with a
- # slash, and produces '.' for the root instead of ''.
+ # slash.
oldpos = len(path)
pos = path.rfind('/')
while pos != -1:
yield path[:pos], path[pos + 1:oldpos]
oldpos = pos
pos = path.rfind('/', 0, pos)
- yield '.', path[:oldpos]
+ yield '', path[:oldpos]
def get(self, path):
return self._dirs.get(path, set())
@@ -603,13 +610,13 @@
self._dirs = set(dirs)
# parents are directories which are non-recursively included because
# they are needed to get to items in _dirs or _roots.
- self._parents = set(parents)
+ self._parents = parents
def visitdir(self, dir):
+ dir = normalizerootdir(dir, 'visitdir')
if self._prefix and dir in self._roots:
return 'all'
- return ('.' in self._roots or
- dir in self._roots or
+ return (dir in self._roots or
dir in self._dirs or
dir in self._parents or
any(parentdir in self._roots
@@ -632,7 +639,7 @@
return 'all'
# Note: this does *not* include the 'dir in self._parents' case from
# visitdir, that's handled below.
- if ('.' in self._roots or
+ if ('' in self._roots or
dir in self._roots or
dir in self._dirs or
any(parentdir in self._roots
@@ -651,7 +658,7 @@
r'''Matches the input files exactly. They are interpreted as paths, not
patterns (so no kind-prefixes).
- >>> m = exactmatcher([b'a.txt', b're:.*\.c$'])
+ >>> m = exactmatcher([b'a.txt', br're:.*\.c$'])
>>> m(b'a.txt')
True
>>> m(b'b.txt')
@@ -664,7 +671,7 @@
So pattern 're:.*\.c$' is not considered as a regex, but as a file name
>>> m(b'main.c')
False
- >>> m(b're:.*\.c$')
+ >>> m(br're:.*\.c$')
True
'''
@@ -680,22 +687,25 @@
@propertycache
def _dirs(self):
- return set(util.dirs(self._fileset)) | {'.'}
+ return set(util.dirs(self._fileset))
def visitdir(self, dir):
+ dir = normalizerootdir(dir, 'visitdir')
return dir in self._dirs
def visitchildrenset(self, dir):
+ dir = normalizerootdir(dir, 'visitchildrenset')
+
if not self._fileset or dir not in self._dirs:
return set()
- candidates = self._fileset | self._dirs - {'.'}
- if dir != '.':
+ candidates = self._fileset | self._dirs - {''}
+ if dir != '':
d = dir + '/'
candidates = set(c[len(d):] for c in candidates if
c.startswith(d))
# self._dirs includes all of the directories, recursively, so if
- # we're attempting to match foo/bar/baz.txt, it'll have '.', 'foo',
+ # we're attempting to match foo/bar/baz.txt, it'll have '', 'foo',
# 'foo/bar' in it. Thus we can safely ignore a candidate that has a
# '/' in it, indicating a it's for a subdir-of-a-subdir; the
# immediate subdir will be in there without a slash.
@@ -769,7 +779,7 @@
# Possible values for m1: set(...), set()
# Possible values for m2: 'this', set(...)
# We ignore m2's set results. They're possibly incorrect:
- # m1 = path:dir/subdir, m2=rootfilesin:dir, visitchildrenset('.'):
+ # m1 = path:dir/subdir, m2=rootfilesin:dir, visitchildrenset(''):
# m1 returns {'dir'}, m2 returns {'dir'}, if we subtracted we'd
# return set(), which is *not* correct, we still need to visit 'dir'!
return m1_set
@@ -915,14 +925,16 @@
return self._matcher.matchfn(self._path + "/" + f)
def visitdir(self, dir):
- if dir == '.':
+ dir = normalizerootdir(dir, 'visitdir')
+ if dir == '':
dir = self._path
else:
dir = self._path + "/" + dir
return self._matcher.visitdir(dir)
def visitchildrenset(self, dir):
- if dir == '.':
+ dir = normalizerootdir(dir, 'visitchildrenset')
+ if dir == '':
dir = self._path
else:
dir = self._path + "/" + dir
@@ -991,18 +1003,18 @@
@propertycache
def _pathdirs(self):
- return set(util.finddirs(self._path)) | {'.'}
+ return set(util.finddirs(self._path))
def visitdir(self, dir):
if dir == self._path:
- return self._matcher.visitdir('.')
+ return self._matcher.visitdir('')
if dir.startswith(self._pathprefix):
return self._matcher.visitdir(dir[len(self._pathprefix):])
return dir in self._pathdirs
def visitchildrenset(self, dir):
if dir == self._path:
- return self._matcher.visitchildrenset('.')
+ return self._matcher.visitchildrenset('')
if dir.startswith(self._pathprefix):
return self._matcher.visitchildrenset(dir[len(self._pathprefix):])
if dir in self._pathdirs:
@@ -1075,7 +1087,7 @@
def patkind(pattern, default=None):
'''If pattern is 'kind:pat' with a known kind, return kind.
- >>> patkind(b're:.*\.c$')
+ >>> patkind(br're:.*\.c$')
're'
>>> patkind(b'glob:*.c')
'glob'
@@ -1178,9 +1190,23 @@
return res
def _regex(kind, pat, globsuffix):
- '''Convert a (normalized) pattern of any kind into a regular expression.
+ '''Convert a (normalized) pattern of any kind into a
+ regular expression.
globsuffix is appended to the regexp of globs.'''
- if not pat:
+
+ if rustmod is not None:
+ try:
+ return rustmod.build_single_regex(
+ kind,
+ pat,
+ globsuffix
+ )
+ except rustmod.PatternError:
+ raise error.ProgrammingError(
+ 'not a regex pattern: %s:%s' % (kind, pat)
+ )
+
+ if not pat and kind in ('glob', 'relpath'):
return ''
if kind == 're':
return pat
@@ -1324,13 +1350,17 @@
if '[' in p or '{' in p or '*' in p or '?' in p:
break
root.append(p)
- r.append('/'.join(root) or '.')
+ r.append('/'.join(root))
elif kind in ('relpath', 'path'):
- r.append(pat or '.')
+ if pat == '.':
+ pat = ''
+ r.append(pat)
elif kind in ('rootfilesin',):
- d.append(pat or '.')
+ if pat == '.':
+ pat = ''
+ d.append(pat)
else: # relglob, re, relre
- r.append('.')
+ r.append('')
return r, d
def _roots(kindpats):
@@ -1347,31 +1377,33 @@
Returns a tuple of (roots, dirs, parents).
- >>> _rootsdirsandparents(
+ >>> r = _rootsdirsandparents(
... [(b'glob', b'g/h/*', b''), (b'glob', b'g/h', b''),
... (b'glob', b'g*', b'')])
- (['g/h', 'g/h', '.'], [], ['g', '.'])
- >>> _rootsdirsandparents(
+ >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
+ (['g/h', 'g/h', ''], []) ['', 'g']
+ >>> r = _rootsdirsandparents(
... [(b'rootfilesin', b'g/h', b''), (b'rootfilesin', b'', b'')])
- ([], ['g/h', '.'], ['g', '.'])
- >>> _rootsdirsandparents(
+ >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
+ ([], ['g/h', '']) ['', 'g']
+ >>> r = _rootsdirsandparents(
... [(b'relpath', b'r', b''), (b'path', b'p/p', b''),
... (b'path', b'', b'')])
- (['r', 'p/p', '.'], [], ['p', '.'])
- >>> _rootsdirsandparents(
+ >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
+ (['r', 'p/p', ''], []) ['', 'p']
+ >>> r = _rootsdirsandparents(
... [(b'relglob', b'rg*', b''), (b're', b're/', b''),
... (b'relre', b'rr', b'')])
- (['.', '.', '.'], [], ['.'])
+ >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
+ (['', '', ''], []) ['']
'''
r, d = _patternrootsanddirs(kindpats)
- p = []
- # Append the parents as non-recursive/exact directories, since they must be
+ p = set()
+ # Add the parents as non-recursive/exact directories, since they must be
# scanned to get to either the roots or the other exact directories.
- p.extend(util.dirs(d))
- p.extend(util.dirs(r))
- # util.dirs() does not include the root directory, so add it manually
- p.append('.')
+ p.update(util.dirs(d))
+ p.update(util.dirs(r))
# FIXME: all uses of this function convert these to sets, do so before
# returning.
@@ -1421,9 +1453,24 @@
pattern # pattern of the current default type
if sourceinfo is set, returns a list of tuples:
- (pattern, lineno, originalline). This is useful to debug ignore patterns.
+ (pattern, lineno, originalline).
+ This is useful to debug ignore patterns.
'''
+ if rustmod is not None:
+ result, warnings = rustmod.read_pattern_file(
+ filepath,
+ bool(warn),
+ sourceinfo,
+ )
+
+ for warning_params in warnings:
+ # Can't be easily emitted from Rust, because it would require
+ # a mechanism for both gettext and calling the `warn` function.
+ warn(_("%s: ignoring invalid syntax '%s'\n") % warning_params)
+
+ return result
+
syntaxes = {
're': 'relre:',
'regexp': 'relre:',
--- a/mercurial/merge.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/merge.py Mon Jul 22 14:00:33 2019 -0400
@@ -10,6 +10,7 @@
import errno
import hashlib
import shutil
+import stat
import struct
from .i18n import _
@@ -683,7 +684,7 @@
def recordactions(self):
"""record remove/add/get actions in the dirstate"""
branchmerge = self._repo.dirstate.p2() != nullid
- recordupdates(self._repo, self.actions(), branchmerge)
+ recordupdates(self._repo, self.actions(), branchmerge, None)
def queueremove(self, f):
"""queues a file to be removed from the dirstate
@@ -1380,7 +1381,6 @@
# Pick the best bid for each file
repo.ui.note(_('\nauction for merging merge bids\n'))
actions = {}
- dms = [] # filenames that have dm actions
for f, bids in sorted(fbids.items()):
# bids is a mapping from action method to list af actions
# Consensus?
@@ -1389,8 +1389,6 @@
if all(a == l[0] for a in l[1:]): # len(bids) is > 1
repo.ui.note(_(" %s: consensus for %s\n") % (f, m))
actions[f] = l[0]
- if m == ACTION_DIR_RENAME_MOVE_LOCAL:
- dms.append(f)
continue
# If keep is an option, just do it.
if ACTION_KEEP in bids:
@@ -1415,18 +1413,7 @@
repo.ui.warn(_(' %s: ambiguous merge - picked %s action\n') %
(f, m))
actions[f] = l[0]
- if m == ACTION_DIR_RENAME_MOVE_LOCAL:
- dms.append(f)
continue
- # Work around 'dm' that can cause multiple actions for the same file
- for f in dms:
- dm, (f0, flags), msg = actions[f]
- assert dm == ACTION_DIR_RENAME_MOVE_LOCAL, dm
- if f0 in actions and actions[f0][0] == ACTION_REMOVE:
- # We have one bid for removing a file and another for moving it.
- # These two could be merged as first move and then delete ...
- # but instead drop moving and just delete.
- del actions[f]
repo.ui.note(_('end of auction\n\n'))
if wctx.rev() is None:
@@ -1478,13 +1465,17 @@
repo.ui.warn(_("current directory was removed\n"
"(consider changing to repo root: %s)\n") % repo.root)
-def batchget(repo, mctx, wctx, actions):
+def batchget(repo, mctx, wctx, wantfiledata, actions):
"""apply gets to the working directory
mctx is the context to get from
- yields tuples for progress updates
+ Yields arbitrarily many (False, tuple) for progress updates, followed by
+ exactly one (True, filedata). When wantfiledata is false, filedata is an
+ empty dict. When wantfiledata is true, filedata[f] is a triple (mode, size,
+ mtime) of the file f written for each action.
"""
+ filedata = {}
verbose = repo.ui.verbose
fctx = mctx.filectx
ui = repo.ui
@@ -1508,16 +1499,24 @@
if repo.wvfs.lexists(conflicting):
orig = scmutil.backuppath(ui, repo, conflicting)
util.rename(repo.wjoin(conflicting), orig)
- wctx[f].clearunknown()
+ wfctx = wctx[f]
+ wfctx.clearunknown()
atomictemp = ui.configbool("experimental", "update.atomic-file")
- wctx[f].write(fctx(f).data(), flags, backgroundclose=True,
- atomictemp=atomictemp)
+ size = wfctx.write(fctx(f).data(), flags,
+ backgroundclose=True,
+ atomictemp=atomictemp)
+ if wantfiledata:
+ s = wfctx.lstat()
+ mode = s.st_mode
+ mtime = s[stat.ST_MTIME]
+ filedata[f] = ((mode, size, mtime)) # for dirstate.normal
if i == 100:
- yield i, f
+ yield False, (i, f)
i = 0
i += 1
if i > 0:
- yield i, f
+ yield False, (i, f)
+ yield True, filedata
def _prefetchfiles(repo, ctx, actions):
"""Invoke ``scmutil.prefetchfiles()`` for the files relevant to the dict
@@ -1564,14 +1563,17 @@
ACTION_PATH_CONFLICT,
ACTION_PATH_CONFLICT_RESOLVE))
-def applyupdates(repo, actions, wctx, mctx, overwrite, labels=None):
+def applyupdates(repo, actions, wctx, mctx, overwrite, wantfiledata,
+ labels=None):
"""apply the merge action list to the working directory
wctx is the working copy context
mctx is the context to be merged into the working copy
- Return a tuple of counts (updated, merged, removed, unresolved) that
- describes how many files were affected by the update.
+ Return a tuple of (counts, filedata), where counts is a tuple
+ (updated, merged, removed, unresolved) that describes how many
+ files were affected by the update, and filedata is as described in
+ batchget.
"""
_prefetchfiles(repo, mctx, actions)
@@ -1663,11 +1665,18 @@
# get in parallel.
threadsafe = repo.ui.configbool('experimental',
'worker.wdir-get-thread-safe')
- prog = worker.worker(repo.ui, cost, batchget, (repo, mctx, wctx),
+ prog = worker.worker(repo.ui, cost, batchget,
+ (repo, mctx, wctx, wantfiledata),
actions[ACTION_GET],
- threadsafe=threadsafe)
- for i, item in prog:
- progress.increment(step=i, item=item)
+ threadsafe=threadsafe,
+ hasretval=True)
+ getfiledata = {}
+ for final, res in prog:
+ if final:
+ getfiledata = res
+ else:
+ i, item = res
+ progress.increment(step=i, item=item)
updated = len(actions[ACTION_GET])
if [a for a in actions[ACTION_GET] if a[0] == '.hgsubstate']:
@@ -1792,6 +1801,10 @@
mfiles = set(a[0] for a in actions[ACTION_MERGE])
for k, acts in extraactions.iteritems():
actions[k].extend(acts)
+ if k == ACTION_GET and wantfiledata:
+ # no filedata until mergestate is updated to provide it
+ for a in acts:
+ getfiledata[a[0]] = None
# Remove these files from actions[ACTION_MERGE] as well. This is
# important because in recordupdates, files in actions[ACTION_MERGE]
# are processed after files in other actions, and the merge driver
@@ -1814,9 +1827,10 @@
if a[0] in mfiles]
progress.complete()
- return updateresult(updated, merged, removed, unresolved)
+ assert len(getfiledata) == (len(actions[ACTION_GET]) if wantfiledata else 0)
+ return updateresult(updated, merged, removed, unresolved), getfiledata
-def recordupdates(repo, actions, branchmerge):
+def recordupdates(repo, actions, branchmerge, getfiledata):
"record merge actions to the dirstate"
# remove (must come first)
for f, args, msg in actions.get(ACTION_REMOVE, []):
@@ -1864,7 +1878,8 @@
if branchmerge:
repo.dirstate.otherparent(f)
else:
- repo.dirstate.normal(f)
+ parentfiledata = getfiledata[f] if getfiledata else None
+ repo.dirstate.normal(f, parentfiledata=parentfiledata)
# merge
for f, args, msg in actions.get(ACTION_MERGE, []):
@@ -1991,14 +2006,10 @@
wc = repo[None]
pl = wc.parents()
p1 = pl[0]
- pas = [None]
+ p2 = repo[node]
if ancestor is not None:
pas = [repo[ancestor]]
-
- overwrite = force and not branchmerge
-
- p2 = repo[node]
- if pas[0] is None:
+ else:
if repo.ui.configlist('merge', 'preferancestor') == ['*']:
cahs = repo.changelog.commonancestorsheads(p1.node(), p2.node())
pas = [repo[anc] for anc in (sorted(cahs) or [nullid])]
@@ -2007,6 +2018,7 @@
fp1, fp2, xp1, xp2 = p1.node(), p2.node(), bytes(p1), bytes(p2)
+ overwrite = force and not branchmerge
### check phase
if not overwrite:
if len(pl) > 1:
@@ -2183,12 +2195,15 @@
'fsmonitor enabled; enable fsmonitor to improve performance; '
'see "hg help -e fsmonitor")\n'))
- stats = applyupdates(repo, actions, wc, p2, overwrite, labels=labels)
+ updatedirstate = not partial and not wc.isinmemory()
+ wantfiledata = updatedirstate and not branchmerge
+ stats, getfiledata = applyupdates(repo, actions, wc, p2, overwrite,
+ wantfiledata, labels=labels)
- if not partial and not wc.isinmemory():
+ if updatedirstate:
with repo.dirstate.parentchange():
repo.setparents(fp1, fp2)
- recordupdates(repo, actions, branchmerge)
+ recordupdates(repo, actions, branchmerge, getfiledata)
# update completed, clear state
util.unlink(repo.vfs.join('updatestate'))
@@ -2219,7 +2234,7 @@
pctx - merge base, usually ctx.p1()
labels - merge labels eg ['local', 'graft']
keepparent - keep second parent if any
- keepparent - if unresolved, keep parent used for the merge
+ keepconflictparent - if unresolved, keep parent used for the merge
"""
# If we're grafting a descendant onto an ancestor, be sure to pass
--- a/mercurial/minirst.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/minirst.py Mon Jul 22 14:00:33 2019 -0400
@@ -44,6 +44,9 @@
def subsubsubsection(s):
return "%s\n%s\n\n" % (s, "." * encoding.colwidth(s))
+def subsubsubsubsection(s):
+ return "%s\n%s\n\n" % (s, "'" * encoding.colwidth(s))
+
def replace(text, substs):
'''
Apply a list of (find, replace) pairs to a text.
--- a/mercurial/narrowspec.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/narrowspec.py Mon Jul 22 14:00:33 2019 -0400
@@ -7,14 +7,13 @@
from __future__ import absolute_import
-import errno
-
from .i18n import _
from . import (
error,
match as matchmod,
merge,
repository,
+ scmutil,
sparse,
util,
)
@@ -144,15 +143,9 @@
return includepats, excludepats
def load(repo):
- try:
- spec = repo.svfs.read(FILENAME)
- except IOError as e:
- # Treat "narrowspec does not exist" the same as "narrowspec file exists
- # and is empty".
- if e.errno == errno.ENOENT:
- return set(), set()
- raise
-
+ # Treat "narrowspec does not exist" the same as "narrowspec file exists
+ # and is empty".
+ spec = repo.svfs.tryread(FILENAME)
return parseconfig(repo.ui, spec)
def save(repo, includepats, excludepats):
@@ -266,9 +259,12 @@
if not repo.wvfs.exists(f):
addgaction((f, (mf.flags(f), False), "narrowspec updated"))
merge.applyupdates(repo, actions, wctx=repo[None],
- mctx=repo['.'], overwrite=False)
+ mctx=repo['.'], overwrite=False, wantfiledata=False)
def checkworkingcopynarrowspec(repo):
+ # Avoid infinite recursion when updating the working copy
+ if getattr(repo, '_updatingnarrowspec', False):
+ return
storespec = repo.svfs.tryread(FILENAME)
wcspec = repo.vfs.tryread(DIRSTATE_FILENAME)
if wcspec != storespec:
@@ -283,6 +279,7 @@
"""
oldspec = repo.vfs.tryread(DIRSTATE_FILENAME)
newspec = repo.svfs.tryread(FILENAME)
+ repo._updatingnarrowspec = True
oldincludes, oldexcludes = parseconfig(repo.ui, oldspec)
newincludes, newexcludes = parseconfig(repo.ui, newspec)
@@ -292,8 +289,8 @@
removedmatch = matchmod.differencematcher(oldmatch, newmatch)
ds = repo.dirstate
- lookup, status = ds.status(removedmatch, subrepos=[], ignored=False,
- clean=True, unknown=False)
+ lookup, status = ds.status(removedmatch, subrepos=[], ignored=True,
+ clean=True, unknown=True)
trackeddirty = status.modified + status.added
clean = status.clean
if assumeclean:
@@ -302,15 +299,19 @@
else:
trackeddirty.extend(lookup)
_deletecleanfiles(repo, clean)
+ uipathfn = scmutil.getuipathfn(repo)
for f in sorted(trackeddirty):
- repo.ui.status(_('not deleting possibly dirty file %s\n') % f)
+ repo.ui.status(_('not deleting possibly dirty file %s\n') % uipathfn(f))
+ for f in sorted(status.unknown):
+ repo.ui.status(_('not deleting unknown file %s\n') % uipathfn(f))
+ for f in sorted(status.ignored):
+ repo.ui.status(_('not deleting ignored file %s\n') % uipathfn(f))
for f in clean + trackeddirty:
ds.drop(f)
- repo.narrowpats = newincludes, newexcludes
- repo._narrowmatch = newmatch
pctx = repo['.']
newfiles = [f for f in pctx.manifest().walk(addedmatch) if f not in ds]
for f in newfiles:
ds.normallookup(f)
_writeaddedfiles(repo, pctx, newfiles)
+ repo._updatingnarrowspec = False
--- a/mercurial/obsolete.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/obsolete.py Mon Jul 22 14:00:33 2019 -0400
@@ -93,10 +93,6 @@
_calcsize = struct.calcsize
propertycache = util.propertycache
-# the obsolete feature is not mature enough to be enabled by default.
-# you have to rely on third party extension extension to enable this.
-_enabled = False
-
# Options for obsolescence
createmarkersopt = 'createmarkers'
allowunstableopt = 'allowunstable'
@@ -124,11 +120,6 @@
if 'all' in result:
return True
- # For migration purposes, temporarily return true if the config hasn't
- # been set but _enabled is true.
- if len(result) == 0 and _enabled:
- return True
-
# Temporary hack for next check
newconfig = repo.ui.config('experimental', 'evolution.createmarkers')
if newconfig:
--- a/mercurial/patch.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/patch.py Mon Jul 22 14:00:33 2019 -0400
@@ -1089,7 +1089,9 @@
return skipfile, skipfile, skipall, newpatches
while True:
resps = messages['help'][operation]
- r = ui.promptchoice("%s %s" % (query, resps))
+        # IMPORTANT: keep the last line of this prompt short (<40 English
+ # chars is a good target) because of issue6158.
+ r = ui.promptchoice("%s\n(enter ? for help) %s" % (query, resps))
ui.write("\n")
if r == 8: # ?
for c, t in ui.extractchoices(resps)[1]:
--- a/mercurial/policy.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/policy.py Mon Jul 22 14:00:33 2019 -0400
@@ -13,6 +13,9 @@
# Rules for how modules can be loaded. Values are:
#
# c - require C extensions
+# rust+c - require Rust and C extensions
+# rust+c-allow - allow Rust and C extensions with fallback to pure Python
+# for each
# allow - allow pure Python implementation when C loading fails
# cffi - required cffi versions (implemented within pure module)
# cffi-allow - allow pure Python implementation if cffi version is missing
@@ -29,6 +32,9 @@
b'cffi': (r'cffi', None),
b'cffi-allow': (r'cffi', r'pure'),
b'py': (None, r'pure'),
+ # For now, rust policies impact importrust only
+ b'rust+c': (r'cext', None),
+ b'rust+c-allow': (r'cext', r'pure'),
}
try:
@@ -69,7 +75,7 @@
(r'cext', r'bdiff'): 3,
(r'cext', r'mpatch'): 1,
(r'cext', r'osutil'): 4,
- (r'cext', r'parsers'): 12,
+ (r'cext', r'parsers'): 13,
}
# map import request to other package or module
@@ -107,3 +113,34 @@
raise
pn, mn = _modredirects.get((purepkg, modname), (purepkg, modname))
return _importfrom(pn, mn)
+
+def _isrustpermissive():
+ """Assuming the policy is a Rust one, tell if it's permissive."""
+ return policy.endswith(b'-allow')
+
+def importrust(modname, member=None, default=None):
+ """Import Rust module according to policy and availability.
+
+ If policy isn't a Rust one, this returns `default`.
+
+ If either the module or its member is not available, this returns `default`
+ if policy is permissive and raises `ImportError` if not.
+ """
+ if not policy.startswith(b'rust'):
+ return default
+
+ try:
+ mod = _importfrom(r'rustext', modname)
+ except ImportError:
+ if _isrustpermissive():
+ return default
+ raise
+ if member is None:
+ return mod
+
+ try:
+ return getattr(mod, member)
+ except AttributeError:
+ if _isrustpermissive():
+ return default
+ raise ImportError(r"Cannot import name %s" % member)
--- a/mercurial/profiling.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/profiling.py Mon Jul 22 14:00:33 2019 -0400
@@ -147,6 +147,8 @@
# inconsistent config: profiling.showmin
limit = ui.configwith(fraction, 'profiling', 'showmin', 0.05)
kwargs[r'limit'] = limit
+ showtime = ui.configbool('profiling', 'showtime')
+ kwargs[r'showtime'] = showtime
statprof.display(fp, data=data, format=displayformat, **kwargs)
--- a/mercurial/pure/osutil.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/pure/osutil.py Mon Jul 22 14:00:33 2019 -0400
@@ -5,7 +5,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
+from __future__ import absolute_import, division
import ctypes
import ctypes.util
@@ -149,7 +149,7 @@
cmsg.cmsg_type != _SCM_RIGHTS):
return []
rfds = ctypes.cast(cmsg.cmsg_data, ctypes.POINTER(ctypes.c_int))
- rfdscount = ((cmsg.cmsg_len - _cmsghdr.cmsg_data.offset) /
+ rfdscount = ((cmsg.cmsg_len - _cmsghdr.cmsg_data.offset) //
ctypes.sizeof(ctypes.c_int))
return [rfds[i] for i in pycompat.xrange(rfdscount)]
--- a/mercurial/registrar.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/registrar.py Mon Jul 22 14:00:33 2019 -0400
@@ -64,8 +64,8 @@
raise error.ProgrammingError(msg)
if func.__doc__ and not util.safehasattr(func, '_origdoc'):
- doc = pycompat.sysbytes(func.__doc__).strip()
- func._origdoc = doc
+ func._origdoc = func.__doc__.strip()
+ doc = pycompat.sysbytes(func._origdoc)
func.__doc__ = pycompat.sysstr(self._formatdoc(decl, doc))
self._table[name] = func
@@ -338,19 +338,10 @@
'''
pass
- # old API (DEPRECATED)
- @templatekeyword('mykeyword')
- def mykeywordfunc(repo, ctx, templ, cache, revcache, **args):
- '''Explanation of this template keyword ....
- '''
- pass
-
The first string argument is used also in online help.
Optional argument 'requires' should be a collection of resource names
- which the template keyword depends on. This also serves as a flag to
- switch to the new API. If 'requires' is unspecified, all template
- keywords and resources are expanded to the function arguments.
+ which the template keyword depends on.
'templatekeyword' instance in example above can be used to
decorate multiple functions.
@@ -362,7 +353,7 @@
Otherwise, explicit 'templatekw.loadkeyword()' is needed.
"""
- def _extrasetup(self, name, func, requires=None):
+ def _extrasetup(self, name, func, requires=()):
func._requires = requires
class templatefilter(_templateregistrarbase):
--- a/mercurial/repair.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/repair.py Mon Jul 22 14:00:33 2019 -0400
@@ -279,7 +279,9 @@
if rev in tostrip:
updatebm.append(m)
newbmtarget = None
- if updatebm: # don't compute anything is there is no bookmark to move anyway
+ # If we need to move bookmarks, compute bookmark
+ # targets. Otherwise we can skip doing this logic.
+ if updatebm:
# For a set s, max(parents(s) - s) is the same as max(heads(::s - s)),
# but is much faster
newbmtarget = repo.revs('max(parents(%ld) - (%ld))', tostrip, tostrip)
@@ -364,8 +366,9 @@
striptrees(repo, tr, striprev, files)
def striptrees(repo, tr, striprev, files):
- if 'treemanifest' in repo.requirements: # safe but unnecessary
- # otherwise
+ if 'treemanifest' in repo.requirements:
+ # This logic is safe if treemanifest isn't enabled, but also
+ # pointless, so we skip it if treemanifest isn't enabled.
for unencoded, encoded, size in repo.store.datafiles():
if (unencoded.startswith('meta/') and
unencoded.endswith('00manifest.i')):
@@ -416,7 +419,9 @@
progress.complete()
- if 'treemanifest' in repo.requirements: # safe but unnecessary otherwise
+ if 'treemanifest' in repo.requirements:
+ # This logic is safe if treemanifest isn't enabled, but also
+ # pointless, so we skip it if treemanifest isn't enabled.
for dir in util.dirs(seenfiles):
i = 'meta/%s/00manifest.i' % dir
d = 'meta/%s/00manifest.d' % dir
--- a/mercurial/repository.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/repository.py Mon Jul 22 14:00:33 2019 -0400
@@ -291,6 +291,10 @@
class ipeerrequests(interfaceutil.Interface):
"""Interface for executing commands on a peer."""
+ limitedarguments = interfaceutil.Attribute(
+ """True if the peer cannot receive large argument value for commands."""
+ )
+
def commandexecutor():
"""A context manager that resolves to an ipeercommandexecutor.
@@ -329,6 +333,8 @@
class peer(object):
"""Base class for peer repositories."""
+ limitedarguments = False
+
def capable(self, name):
caps = self.capabilities()
if name in caps:
@@ -1650,7 +1656,7 @@
editor=False, extra=None):
"""Add a new revision to the repository."""
- def commitctx(ctx, error=False):
+ def commitctx(ctx, error=False, origctx=None):
"""Commit a commitctx instance to the repository."""
def destroying():
--- a/mercurial/repoview.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/repoview.py Mon Jul 22 14:00:33 2019 -0400
@@ -17,6 +17,10 @@
phases,
pycompat,
tags as tagsmod,
+ util,
+)
+from .utils import (
+ repoviewutil,
)
def hideablerevs(repo):
@@ -154,6 +158,35 @@
'immutable': computemutable,
'base': computeimpactable}
+_basefiltername = list(filtertable)
+
+def extrafilter(ui):
+ """initialize extra filter and return its id
+
+ If extra filtering is configured, we make sure the associated filtered view
+ are declared and return the associated id.
+ """
+ frevs = ui.config('experimental', 'extra-filter-revs')
+ if frevs is None:
+ return None
+
+ fid = pycompat.sysbytes(util.DIGESTS['sha1'](frevs).hexdigest())[:12]
+
+ combine = lambda fname: fname + '%' + fid
+
+ subsettable = repoviewutil.subsettable
+
+ if combine('base') not in filtertable:
+ for name in _basefiltername:
+ def extrafilteredrevs(repo, *args, **kwargs):
+ baserevs = filtertable[name](repo, *args, **kwargs)
+ extrarevs = frozenset(repo.revs(frevs))
+ return baserevs | extrarevs
+ filtertable[combine(name)] = extrafilteredrevs
+ if name in subsettable:
+ subsettable[combine(name)] = combine(subsettable[name])
+ return fid
+
def filterrevs(repo, filtername, visibilityexceptions=None):
"""returns set of filtered revision for this filter name
--- a/mercurial/revlog.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/revlog.py Mon Jul 22 14:00:33 2019 -0400
@@ -16,6 +16,7 @@
import collections
import contextlib
import errno
+import io
import os
import struct
import zlib
@@ -97,11 +98,8 @@
REVIDX_RAWTEXT_CHANGING_FLAGS
parsers = policy.importmod(r'parsers')
-try:
- from . import rustext
- rustext.__name__ # force actual import (see hgdemandimport)
-except ImportError:
- rustext = None
+rustancestor = policy.importrust(r'ancestor')
+rustdagop = policy.importrust(r'dagop')
# Aliased for performance.
_zlibdecompress = zlib.decompress
@@ -337,15 +335,21 @@
configured threshold.
If censorable is True, the revlog can have censored revisions.
+
+ If `upperboundcomp` is not None, this is the expected maximal gain from
+ compression for the data content.
"""
def __init__(self, opener, indexfile, datafile=None, checkambig=False,
- mmaplargeindex=False, censorable=False):
+ mmaplargeindex=False, censorable=False,
+ upperboundcomp=None):
"""
create a revlog object
opener is a function that abstracts the file opening operation
and can be used to implement COW semantics or the like.
+
"""
+ self.upperboundcomp = upperboundcomp
self.indexfile = indexfile
self.datafile = datafile or (indexfile[:-2] + ".d")
self.opener = opener
@@ -825,8 +829,8 @@
checkrev(r)
# and we're sure ancestors aren't filtered as well
- if rustext is not None:
- lazyancestors = rustext.ancestor.LazyAncestors
+ if rustancestor is not None:
+ lazyancestors = rustancestor.LazyAncestors
arg = self.index
elif util.safehasattr(parsers, 'rustlazyancestors'):
lazyancestors = ancestor.rustlazyancestors
@@ -915,8 +919,8 @@
if common is None:
common = [nullrev]
- if rustext is not None:
- return rustext.ancestor.MissingAncestors(self.index, common)
+ if rustancestor is not None:
+ return rustancestor.MissingAncestors(self.index, common)
return ancestor.incrementalmissingancestors(self.parentrevs, common)
def findmissingrevs(self, common=None, heads=None):
@@ -1130,8 +1134,8 @@
return self.index.headrevs()
except AttributeError:
return self._headrevs()
- if rustext is not None:
- return rustext.dagop.headrevs(self.index, revs)
+ if rustdagop is not None:
+ return rustdagop.headrevs(self.index, revs)
return dagop.headrevs(revs, self._uncheckedparentrevs)
def computephases(self, roots):
@@ -1216,14 +1220,25 @@
A revision is considered an ancestor of itself.
The implementation of this is trivial but the use of
- commonancestorsheads is not."""
+ reachableroots is not."""
if a == nullrev:
return True
elif a == b:
return True
elif a > b:
return False
- return a in self._commonancestorsheads(a, b)
+ return bool(self.reachableroots(a, [b], [a], includepath=False))
+
+ def reachableroots(self, minroot, heads, roots, includepath=False):
+ """return (heads(::<roots> and <roots>::<heads>))
+
+ If includepath is True, return (<roots>::<heads>)."""
+ try:
+ return self.index.reachableroots2(minroot, heads, roots,
+ includepath)
+ except AttributeError:
+ return dagop._reachablerootspure(self.parentrevs,
+ minroot, roots, heads, includepath)
def ancestor(self, a, b):
"""calculate the "best" common ancestor of nodes a and b"""
@@ -1340,13 +1355,13 @@
"""Find the shortest unambiguous prefix that matches node."""
def isvalid(prefix):
try:
- node = self._partialmatch(prefix)
+ matchednode = self._partialmatch(prefix)
except error.AmbiguousPrefixLookupError:
return False
except error.WdirUnsupported:
# single 'ff...' match
return True
- if node is None:
+ if matchednode is None:
raise error.LookupError(node, self.indexfile, _('no node'))
return True
@@ -2292,7 +2307,7 @@
try:
with self._datafp() as f:
- f.seek(0, 2)
+ f.seek(0, io.SEEK_END)
actual = f.tell()
dd = actual - expected
except IOError as inst:
@@ -2302,7 +2317,7 @@
try:
f = self.opener(self.indexfile)
- f.seek(0, 2)
+ f.seek(0, io.SEEK_END)
actual = f.tell()
f.close()
s = self._io.size
--- a/mercurial/revlogutils/deltas.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/revlogutils/deltas.py Mon Jul 22 14:00:33 2019 -0400
@@ -679,6 +679,31 @@
# if chain already have too much data, skip base
if deltas_limit < chainsize:
continue
+ if sparse and revlog.upperboundcomp is not None:
+ maxcomp = revlog.upperboundcomp
+ basenotsnap = (p1, p2, nullrev)
+ if rev not in basenotsnap and revlog.issnapshot(rev):
+ snapshotdepth = revlog.snapshotdepth(rev)
+ # If text is significantly larger than the base, we can
+ # expect the resulting delta to be proportional to the size
+ # difference
+ revsize = revlog.rawsize(rev)
+ rawsizedistance = max(textlen - revsize, 0)
+ # use an estimate of the compression upper bound.
+ lowestrealisticdeltalen = rawsizedistance // maxcomp
+
+ # check the absolute constraint on the delta size
+ snapshotlimit = textlen >> snapshotdepth
+ if snapshotlimit < lowestrealisticdeltalen:
+ # delta lower bound is larger than accepted upper bound
+ continue
+
+ # check the relative constraint on the delta size
+ revlength = revlog.length(rev)
+ if revlength < lowestrealisticdeltalen:
+ # delta probable lower bound is larger than target base
+ continue
+
group.append(rev)
if group:
# XXX: in the sparse revlog case, group can become large,
@@ -907,6 +932,21 @@
def _builddeltainfo(self, revinfo, base, fh):
# can we use the cached delta?
+ revlog = self.revlog
+ chainbase = revlog.chainbase(base)
+ if revlog._generaldelta:
+ deltabase = base
+ else:
+ deltabase = chainbase
+ snapshotdepth = None
+ if revlog._sparserevlog and deltabase == nullrev:
+ snapshotdepth = 0
+ elif revlog._sparserevlog and revlog.issnapshot(deltabase):
+ # A delta chain should always be one full snapshot,
+ # zero or more semi-snapshots, and zero or more deltas
+ p1, p2 = revlog.rev(revinfo.p1), revlog.rev(revinfo.p2)
+ if deltabase not in (p1, p2) and revlog.issnapshot(deltabase):
+ snapshotdepth = len(revlog._deltachain(deltabase)[0])
delta = None
if revinfo.cachedelta:
cachebase, cachediff = revinfo.cachedelta
@@ -920,31 +960,22 @@
delta = revinfo.cachedelta[1]
if delta is None:
delta = self._builddeltadiff(base, revinfo, fh)
- revlog = self.revlog
+            # snapshotdepth needs to be neither None nor a 0-level snapshot
+ if revlog.upperboundcomp is not None and snapshotdepth:
+ lowestrealisticdeltalen = len(delta) // revlog.upperboundcomp
+ snapshotlimit = revinfo.textlen >> snapshotdepth
+ if snapshotlimit < lowestrealisticdeltalen:
+ return None
+ if revlog.length(base) < lowestrealisticdeltalen:
+ return None
header, data = revlog.compress(delta)
deltalen = len(header) + len(data)
- chainbase = revlog.chainbase(base)
offset = revlog.end(len(revlog) - 1)
dist = deltalen + offset - revlog.start(chainbase)
- if revlog._generaldelta:
- deltabase = base
- else:
- deltabase = chainbase
chainlen, compresseddeltalen = revlog._chaininfo(base)
chainlen += 1
compresseddeltalen += deltalen
- revlog = self.revlog
- snapshotdepth = None
- if deltabase == nullrev:
- snapshotdepth = 0
- elif revlog._sparserevlog and revlog.issnapshot(deltabase):
- # A delta chain should always be one full snapshot,
- # zero or more semi-snapshots, and zero or more deltas
- p1, p2 = revlog.rev(revinfo.p1), revlog.rev(revinfo.p2)
- if deltabase not in (p1, p2) and revlog.issnapshot(deltabase):
- snapshotdepth = len(revlog._deltachain(deltabase)[0])
-
return _deltainfo(dist, deltalen, (header, data), deltabase,
chainbase, chainlen, compresseddeltalen,
snapshotdepth)
@@ -1002,8 +1033,9 @@
nominateddeltas.append(deltainfo)
for candidaterev in candidaterevs:
candidatedelta = self._builddeltainfo(revinfo, candidaterev, fh)
- if isgooddeltainfo(self.revlog, candidatedelta, revinfo):
- nominateddeltas.append(candidatedelta)
+ if candidatedelta is not None:
+ if isgooddeltainfo(self.revlog, candidatedelta, revinfo):
+ nominateddeltas.append(candidatedelta)
if nominateddeltas:
deltainfo = min(nominateddeltas, key=lambda x: x.deltalen)
if deltainfo is not None:
--- a/mercurial/revset.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/revset.py Mon Jul 22 14:00:33 2019 -0400
@@ -52,6 +52,9 @@
spanset = smartset.spanset
fullreposet = smartset.fullreposet
+# revisions not included in all(), but populated if specified
+_virtualrevs = (node.nullrev, node.wdirrev)
+
# Constants for ordering requirement, used in getset():
#
# If 'define', any nested functions and operations MAY change the ordering of
@@ -120,8 +123,7 @@
if not x:
raise error.ParseError(_("empty string is not a valid revision"))
x = scmutil.intrev(scmutil.revsymbol(repo, x))
- if (x in subset
- or x == node.nullrev and isinstance(subset, fullreposet)):
+ if x in subset or x in _virtualrevs and isinstance(subset, fullreposet):
return baseset([x])
return baseset()
@@ -1359,8 +1361,13 @@
# i18n: "merge" is a keyword
getargs(x, 0, 0, _("merge takes no arguments"))
cl = repo.changelog
- return subset.filter(lambda r: cl.parentrevs(r)[1] != -1,
- condrepr='<merge>')
+ nullrev = node.nullrev
+ def ismerge(r):
+ try:
+ return cl.parentrevs(r)[1] != nullrev
+ except error.WdirUnsupported:
+ return bool(repo[r].p2())
+ return subset.filter(ismerge, condrepr='<merge>')
@predicate('branchpoint()', safe=True)
def branchpoint(repo, subset, x):
@@ -1847,7 +1854,7 @@
except (TypeError, ValueError):
# i18n: "rev" is a keyword
raise error.ParseError(_("rev expects a number"))
- if l not in repo.changelog and l not in (node.nullrev, node.wdirrev):
+ if l not in repo.changelog and l not in _virtualrevs:
return baseset()
return subset & baseset([l])
@@ -2262,7 +2269,7 @@
if r in seen:
continue
if (r in subset
- or r == node.nullrev and isinstance(subset, fullreposet)):
+ or r in _virtualrevs and isinstance(subset, fullreposet)):
ls.append(r)
seen.add(r)
return baseset(ls)
--- a/mercurial/scmutil.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/scmutil.py Mon Jul 22 14:00:33 2019 -0400
@@ -1247,6 +1247,28 @@
return getrenamed
+def getcopiesfn(repo, endrev=None):
+ if copiesmod.usechangesetcentricalgo(repo):
+ def copiesfn(ctx):
+ if ctx.p2copies():
+ allcopies = ctx.p1copies().copy()
+ # There should be no overlap
+ allcopies.update(ctx.p2copies())
+ return sorted(allcopies.items())
+ else:
+ return sorted(ctx.p1copies().items())
+ else:
+ getrenamed = getrenamedfn(repo, endrev)
+ def copiesfn(ctx):
+ copies = []
+ for fn in ctx.files():
+ rename = getrenamed(fn, ctx.rev())
+ if rename:
+ copies.append((fn, rename))
+ return copies
+
+ return copiesfn
+
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
"""Update the dirstate to reflect the intent of copying src to dst. For
different reasons it might not end with dst being marked as copied from src.
@@ -1519,7 +1541,12 @@
pass # we ignore data for nodes that don't exist locally
finally:
if proc:
- proc.communicate()
+ try:
+ proc.communicate()
+ except ValueError:
+ # This happens if we started iterating src and then
+ # get a parse error on a line. It should be safe to ignore.
+ pass
if src:
src.close()
if proc and proc.returncode != 0:
--- a/mercurial/setdiscovery.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/setdiscovery.py Mon Jul 22 14:00:33 2019 -0400
@@ -110,22 +110,23 @@
(all tracked revisions are known locally)
"""
- def __init__(self, repo, targetheads):
+ def __init__(self, repo, targetheads, respectsize):
self._repo = repo
self._targetheads = targetheads
self._common = repo.changelog.incrementalmissingrevs()
self._undecided = None
self.missing = set()
self._childrenmap = None
+ self._respectsize = respectsize
def addcommons(self, commons):
- """registrer nodes known as common"""
+ """register nodes known as common"""
self._common.addbases(commons)
if self._undecided is not None:
self._common.removeancestorsfrom(self._undecided)
def addmissings(self, missings):
- """registrer some nodes as missing"""
+ """register some nodes as missing"""
newmissing = self._repo.revs('%ld::%ld', missings, self.undecided)
if newmissing:
self.missing.update(newmissing)
@@ -241,11 +242,13 @@
# update from roots
revsroots = set(repo.revs('roots(%ld)', revs))
-
childrenrevs = self._childrengetter()
-
_updatesample(revs, revsroots, sample, childrenrevs)
assert sample
+
+ if not self._respectsize:
+ size = max(size, min(len(revsroots), len(revsheads)))
+
sample = _limitsample(sample, size)
if len(sample) < size:
more = size - len(sample)
@@ -256,7 +259,8 @@
initialsamplesize=100,
fullsamplesize=200,
abortwhenunrelated=True,
- ancestorsof=None):
+ ancestorsof=None,
+ samplegrowth=1.05):
'''Return a tuple (common, anyincoming, remoteheads) used to identify
missing nodes from or in remote.
'''
@@ -275,9 +279,63 @@
# early exit if we know all the specified remote heads already
ui.debug("query 1; heads\n")
roundtrips += 1
- sample = _limitsample(ownheads, initialsamplesize)
- # indices between sample and externalized version must match
- sample = list(sample)
+ # We also ask remote about all the local heads. That set can be arbitrarily
+ # large, so we used to limit its size to `initialsamplesize`. We no longer
+ # do as it proved counter productive. The skipped heads could lead to a
+ # large "undecided" set, slower to be clarified than if we asked the
+ # question for all heads right away.
+ #
+ # We are already fetching all server heads using the `heads` commands,
+ # sending an equivalent number of heads the other way should not have a
+ # significant impact. In addition, it is very likely that we are going to
+ # have to issue "known" requests for an equivalent number of revisions in
+ # order to decide if these heads are common or missing.
+ #
+ # find a detailed analysis below.
+ #
+ # Case A: local and server both have few heads
+ #
+ # Ownheads is below initialsamplesize, limit would not have any effect.
+ #
+ # Case B: local has few heads and server has many
+ #
+ # Ownheads is below initialsamplesize, limit would not have any effect.
+ #
+ # Case C: local and server both have many heads
+ #
+ # We now transfer some more data, but not significantly more than is
+ # already transferred to carry the server heads.
+ #
+ # Case D: local has many heads, server has few
+ #
+ # D.1 local heads are mostly known remotely
+ #
+ # All the known heads will have been part of a `known` request at some
+ # point for the discovery to finish. Sending them all earlier is
+ # actually helping.
+ #
+ # (This case is fairly unlikely, it requires the numerous heads to all
+ # be merged server side in only a few heads)
+ #
+ # D.2 local heads are mostly missing remotely
+ #
+ # To determine that the heads are missing, we'll have to issue `known`
+ # requests for them or one of their ancestors. This number of `known`
+ # requests will likely be in the same order of magnitude as the number
+ # of local heads.
+ #
+ # The only case where we can be more efficient using `known` requests on
+ # ancestors is the case where all the "missing" local heads are based on a
+ # few changesets, also "missing". This means we would have a "complex"
+ # graph (with many heads) attached to, but very independent of, the
+ # "simple" graph on the server. This is a fairly unusual case and has
+ # not been met in the wild so far.
+ if remote.limitedarguments:
+ sample = _limitsample(ownheads, initialsamplesize)
+ # indices between sample and externalized version must match
+ sample = list(sample)
+ else:
+ sample = ownheads
with remote.commandexecutor() as e:
fheads = e.callcommand('heads', {})
@@ -318,7 +376,7 @@
# full blown discovery
- disco = partialdiscovery(local, ownheads)
+ disco = partialdiscovery(local, ownheads, remote.limitedarguments)
# treat remote heads (and maybe own heads) as a first implicit sample
# response
disco.addcommons(knownsrvheads)
@@ -335,6 +393,8 @@
ui.debug("taking initial sample\n")
samplefunc = disco.takefullsample
targetsize = fullsamplesize
+ if not remote.limitedarguments:
+ fullsamplesize = int(fullsamplesize * samplegrowth)
else:
# use even cheaper initial sample
ui.debug("taking quick initial sample\n")
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/shelve.py Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,1002 @@
+# shelve.py - save/restore working directory state
+#
+# Copyright 2013 Facebook, Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+"""save and restore changes to the working directory
+
+The "hg shelve" command saves changes made to the working directory
+and reverts those changes, resetting the working directory to a clean
+state.
+
+Later on, the "hg unshelve" command restores the changes saved by "hg
+shelve". Changes can be restored even after updating to a different
+parent, in which case Mercurial's merge machinery will resolve any
+conflicts if necessary.
+
+You can have more than one shelved change outstanding at a time; each
+shelved change has a distinct name. For details, see the help for "hg
+shelve".
+"""
+from __future__ import absolute_import
+
+import collections
+import errno
+import itertools
+import stat
+
+from .i18n import _
+from . import (
+ bookmarks,
+ bundle2,
+ bundlerepo,
+ changegroup,
+ cmdutil,
+ discovery,
+ error,
+ exchange,
+ hg,
+ lock as lockmod,
+ mdiff,
+ merge,
+ node as nodemod,
+ patch,
+ phases,
+ pycompat,
+ repair,
+ scmutil,
+ templatefilters,
+ util,
+ vfs as vfsmod,
+)
+from .utils import (
+ dateutil,
+ stringutil,
+)
+
+backupdir = 'shelve-backup'
+shelvedir = 'shelved'
+shelvefileextensions = ['hg', 'patch', 'shelve']
+# universal extension is present in all types of shelves
+patchextension = 'patch'
+
+# we never need the user, so we use a
+# generic user for all shelve operations
+shelveuser = 'shelve@localhost'
+
+class shelvedfile(object):
+ """Helper for the file storing a single shelve
+
+ Handles common functions on shelve files (.hg/.patch) using
+ the vfs layer"""
+ def __init__(self, repo, name, filetype=None):
+ self.repo = repo
+ self.name = name
+ self.vfs = vfsmod.vfs(repo.vfs.join(shelvedir))
+ self.backupvfs = vfsmod.vfs(repo.vfs.join(backupdir))
+ self.ui = self.repo.ui
+ if filetype:
+ self.fname = name + '.' + filetype
+ else:
+ self.fname = name
+
+ def exists(self):
+ return self.vfs.exists(self.fname)
+
+ def filename(self):
+ return self.vfs.join(self.fname)
+
+ def backupfilename(self):
+ def gennames(base):
+ yield base
+ base, ext = base.rsplit('.', 1)
+ for i in itertools.count(1):
+ yield '%s-%d.%s' % (base, i, ext)
+
+ name = self.backupvfs.join(self.fname)
+ for n in gennames(name):
+ if not self.backupvfs.exists(n):
+ return n
+
+ def movetobackup(self):
+ if not self.backupvfs.isdir():
+ self.backupvfs.makedir()
+ util.rename(self.filename(), self.backupfilename())
+
+ def stat(self):
+ return self.vfs.stat(self.fname)
+
+ def opener(self, mode='rb'):
+ try:
+ return self.vfs(self.fname, mode)
+ except IOError as err:
+ if err.errno != errno.ENOENT:
+ raise
+ raise error.Abort(_("shelved change '%s' not found") % self.name)
+
+ def applybundle(self, tr):
+ fp = self.opener()
+ try:
+ targetphase = phases.internal
+ if not phases.supportinternal(self.repo):
+ targetphase = phases.secret
+ gen = exchange.readbundle(self.repo.ui, fp, self.fname, self.vfs)
+ pretip = self.repo['tip']
+ bundle2.applybundle(self.repo, gen, tr,
+ source='unshelve',
+ url='bundle:' + self.vfs.join(self.fname),
+ targetphase=targetphase)
+ shelvectx = self.repo['tip']
+ if pretip == shelvectx:
+ shelverev = tr.changes['revduplicates'][-1]
+ shelvectx = self.repo[shelverev]
+ return shelvectx
+ finally:
+ fp.close()
+
+ def bundlerepo(self):
+ path = self.vfs.join(self.fname)
+ return bundlerepo.instance(self.repo.baseui,
+ 'bundle://%s+%s' % (self.repo.root, path))
+
+ def writebundle(self, bases, node):
+ cgversion = changegroup.safeversion(self.repo)
+ if cgversion == '01':
+ btype = 'HG10BZ'
+ compression = None
+ else:
+ btype = 'HG20'
+ compression = 'BZ'
+
+ repo = self.repo.unfiltered()
+
+ outgoing = discovery.outgoing(repo, missingroots=bases,
+ missingheads=[node])
+ cg = changegroup.makechangegroup(repo, outgoing, cgversion, 'shelve')
+
+ bundle2.writebundle(self.ui, cg, self.fname, btype, self.vfs,
+ compression=compression)
+
+ def writeinfo(self, info):
+ scmutil.simplekeyvaluefile(self.vfs, self.fname).write(info)
+
+ def readinfo(self):
+ return scmutil.simplekeyvaluefile(self.vfs, self.fname).read()
+
+class shelvedstate(object):
+ """Handle persistence during unshelving operations.
+
+ Handles saving and restoring a shelved state. Ensures that different
+ versions of a shelved state are possible and handles them appropriately.
+ """
+ _version = 2
+ _filename = 'shelvedstate'
+ _keep = 'keep'
+ _nokeep = 'nokeep'
+ # colon is essential to differentiate from a real bookmark name
+ _noactivebook = ':no-active-bookmark'
+
+ @classmethod
+ def _verifyandtransform(cls, d):
+ """Some basic shelvestate syntactic verification and transformation"""
+ try:
+ d['originalwctx'] = nodemod.bin(d['originalwctx'])
+ d['pendingctx'] = nodemod.bin(d['pendingctx'])
+ d['parents'] = [nodemod.bin(h)
+ for h in d['parents'].split(' ')]
+ d['nodestoremove'] = [nodemod.bin(h)
+ for h in d['nodestoremove'].split(' ')]
+ except (ValueError, TypeError, KeyError) as err:
+ raise error.CorruptedState(pycompat.bytestr(err))
+
+ @classmethod
+ def _getversion(cls, repo):
+ """Read version information from shelvestate file"""
+ fp = repo.vfs(cls._filename)
+ try:
+ version = int(fp.readline().strip())
+ except ValueError as err:
+ raise error.CorruptedState(pycompat.bytestr(err))
+ finally:
+ fp.close()
+ return version
+
+ @classmethod
+ def _readold(cls, repo):
+ """Read the old position-based version of a shelvestate file"""
+ # Order is important, because old shelvestate file uses it
+ # to determine values of fields (e.g. name is on the second line,
+ # originalwctx is on the third and so forth). Please do not change.
+ keys = ['version', 'name', 'originalwctx', 'pendingctx', 'parents',
+ 'nodestoremove', 'branchtorestore', 'keep', 'activebook']
+ # this is executed only rarely, so it is not a big deal
+ # that we open this file twice
+ fp = repo.vfs(cls._filename)
+ d = {}
+ try:
+ for key in keys:
+ d[key] = fp.readline().strip()
+ finally:
+ fp.close()
+ return d
+
+ @classmethod
+ def load(cls, repo):
+ version = cls._getversion(repo)
+ if version < cls._version:
+ d = cls._readold(repo)
+ elif version == cls._version:
+ d = scmutil.simplekeyvaluefile(
+ repo.vfs, cls._filename).read(firstlinenonkeyval=True)
+ else:
+ raise error.Abort(_('this version of shelve is incompatible '
+ 'with the version used in this repo'))
+
+ cls._verifyandtransform(d)
+ try:
+ obj = cls()
+ obj.name = d['name']
+ obj.wctx = repo[d['originalwctx']]
+ obj.pendingctx = repo[d['pendingctx']]
+ obj.parents = d['parents']
+ obj.nodestoremove = d['nodestoremove']
+ obj.branchtorestore = d.get('branchtorestore', '')
+ obj.keep = d.get('keep') == cls._keep
+ obj.activebookmark = ''
+ if d.get('activebook', '') != cls._noactivebook:
+ obj.activebookmark = d.get('activebook', '')
+ except (error.RepoLookupError, KeyError) as err:
+ raise error.CorruptedState(pycompat.bytestr(err))
+
+ return obj
+
+ @classmethod
+ def save(cls, repo, name, originalwctx, pendingctx, nodestoremove,
+ branchtorestore, keep=False, activebook=''):
+ info = {
+ "name": name,
+ "originalwctx": nodemod.hex(originalwctx.node()),
+ "pendingctx": nodemod.hex(pendingctx.node()),
+ "parents": ' '.join([nodemod.hex(p)
+ for p in repo.dirstate.parents()]),
+ "nodestoremove": ' '.join([nodemod.hex(n)
+ for n in nodestoremove]),
+ "branchtorestore": branchtorestore,
+ "keep": cls._keep if keep else cls._nokeep,
+ "activebook": activebook or cls._noactivebook
+ }
+ scmutil.simplekeyvaluefile(
+ repo.vfs, cls._filename).write(info,
+ firstline=("%d" % cls._version))
+
+ @classmethod
+ def clear(cls, repo):
+ repo.vfs.unlinkpath(cls._filename, ignoremissing=True)
+
+def cleanupoldbackups(repo):
+ vfs = vfsmod.vfs(repo.vfs.join(backupdir))
+ maxbackups = repo.ui.configint('shelve', 'maxbackups')
+ hgfiles = [f for f in vfs.listdir()
+ if f.endswith('.' + patchextension)]
+ hgfiles = sorted([(vfs.stat(f)[stat.ST_MTIME], f) for f in hgfiles])
+ if maxbackups > 0 and maxbackups < len(hgfiles):
+ bordermtime = hgfiles[-maxbackups][0]
+ else:
+ bordermtime = None
+ for mtime, f in hgfiles[:len(hgfiles) - maxbackups]:
+ if mtime == bordermtime:
+ # keep it, because timestamp can't decide exact order of backups
+ continue
+ base = f[:-(1 + len(patchextension))]
+ for ext in shelvefileextensions:
+ vfs.tryunlink(base + '.' + ext)
+
+def _backupactivebookmark(repo):
+ activebookmark = repo._activebookmark
+ if activebookmark:
+ bookmarks.deactivate(repo)
+ return activebookmark
+
+def _restoreactivebookmark(repo, mark):
+ if mark:
+ bookmarks.activate(repo, mark)
+
+def _aborttransaction(repo, tr):
+ '''Abort current transaction for shelve/unshelve, but keep dirstate
+ '''
+ dirstatebackupname = 'dirstate.shelve'
+ repo.dirstate.savebackup(tr, dirstatebackupname)
+ tr.abort()
+ repo.dirstate.restorebackup(None, dirstatebackupname)
+
+def getshelvename(repo, parent, opts):
+ """Decide on the name this shelve is going to have"""
+ def gennames():
+ yield label
+ for i in itertools.count(1):
+ yield '%s-%02d' % (label, i)
+ name = opts.get('name')
+ label = repo._activebookmark or parent.branch() or 'default'
+ # slashes aren't allowed in filenames, therefore we rename it
+ label = label.replace('/', '_')
+ label = label.replace('\\', '_')
+ # filenames must not start with '.' as it should not be hidden
+ if label.startswith('.'):
+ label = label.replace('.', '_', 1)
+
+ if name:
+ if shelvedfile(repo, name, patchextension).exists():
+ e = _("a shelved change named '%s' already exists") % name
+ raise error.Abort(e)
+
+ # ensure we are not creating a subdirectory or a hidden file
+ if '/' in name or '\\' in name:
+ raise error.Abort(_('shelved change names can not contain slashes'))
+ if name.startswith('.'):
+ raise error.Abort(_("shelved change names can not start with '.'"))
+
+ else:
+ for n in gennames():
+ if not shelvedfile(repo, n, patchextension).exists():
+ name = n
+ break
+
+ return name
+
+def mutableancestors(ctx):
+ """return all mutable ancestors for ctx (included)
+
+ Much faster than the revset ancestors(ctx) & draft()"""
+ seen = {nodemod.nullrev}
+ visit = collections.deque()
+ visit.append(ctx)
+ while visit:
+ ctx = visit.popleft()
+ yield ctx.node()
+ for parent in ctx.parents():
+ rev = parent.rev()
+ if rev not in seen:
+ seen.add(rev)
+ if parent.mutable():
+ visit.append(parent)
+
+def getcommitfunc(extra, interactive, editor=False):
+ def commitfunc(ui, repo, message, match, opts):
+ hasmq = util.safehasattr(repo, 'mq')
+ if hasmq:
+ saved, repo.mq.checkapplied = repo.mq.checkapplied, False
+
+ targetphase = phases.internal
+ if not phases.supportinternal(repo):
+ targetphase = phases.secret
+ overrides = {('phases', 'new-commit'): targetphase}
+ try:
+ editor_ = False
+ if editor:
+ editor_ = cmdutil.getcommiteditor(editform='shelve.shelve',
+ **pycompat.strkwargs(opts))
+ with repo.ui.configoverride(overrides):
+ return repo.commit(message, shelveuser, opts.get('date'),
+ match, editor=editor_, extra=extra)
+ finally:
+ if hasmq:
+ repo.mq.checkapplied = saved
+
+ def interactivecommitfunc(ui, repo, *pats, **opts):
+ opts = pycompat.byteskwargs(opts)
+ match = scmutil.match(repo['.'], pats, {})
+ message = opts['message']
+ return commitfunc(ui, repo, message, match, opts)
+
+ return interactivecommitfunc if interactive else commitfunc
+
+def _nothingtoshelvemessaging(ui, repo, pats, opts):
+ stat = repo.status(match=scmutil.match(repo[None], pats, opts))
+ if stat.deleted:
+ ui.status(_("nothing changed (%d missing files, see "
+ "'hg status')\n") % len(stat.deleted))
+ else:
+ ui.status(_("nothing changed\n"))
+
+def _shelvecreatedcommit(repo, node, name, match):
+ info = {'node': nodemod.hex(node)}
+ shelvedfile(repo, name, 'shelve').writeinfo(info)
+ bases = list(mutableancestors(repo[node]))
+ shelvedfile(repo, name, 'hg').writebundle(bases, node)
+ with shelvedfile(repo, name, patchextension).opener('wb') as fp:
+ cmdutil.exportfile(repo, [node], fp, opts=mdiff.diffopts(git=True),
+ match=match)
+
+def _includeunknownfiles(repo, pats, opts, extra):
+ s = repo.status(match=scmutil.match(repo[None], pats, opts),
+ unknown=True)
+ if s.unknown:
+ extra['shelve_unknown'] = '\0'.join(s.unknown)
+ repo[None].add(s.unknown)
+
+def _finishshelve(repo, tr):
+ if phases.supportinternal(repo):
+ tr.close()
+ else:
+ _aborttransaction(repo, tr)
+
+def createcmd(ui, repo, pats, opts):
+ """subcommand that creates a new shelve"""
+ with repo.wlock():
+ cmdutil.checkunfinished(repo)
+ return _docreatecmd(ui, repo, pats, opts)
+
+def _docreatecmd(ui, repo, pats, opts):
+ wctx = repo[None]
+ parents = wctx.parents()
+ parent = parents[0]
+ origbranch = wctx.branch()
+
+ if parent.node() != nodemod.nullid:
+ desc = "changes to: %s" % parent.description().split('\n', 1)[0]
+ else:
+ desc = '(changes in empty repository)'
+
+ if not opts.get('message'):
+ opts['message'] = desc
+
+ lock = tr = activebookmark = None
+ try:
+ lock = repo.lock()
+
+ # use an uncommitted transaction to generate the bundle to avoid
+ # pull races. ensure we don't print the abort message to stderr.
+ tr = repo.transaction('shelve', report=lambda x: None)
+
+ interactive = opts.get('interactive', False)
+ includeunknown = (opts.get('unknown', False) and
+ not opts.get('addremove', False))
+
+ name = getshelvename(repo, parent, opts)
+ activebookmark = _backupactivebookmark(repo)
+ extra = {'internal': 'shelve'}
+ if includeunknown:
+ _includeunknownfiles(repo, pats, opts, extra)
+
+ if _iswctxonnewbranch(repo) and not _isbareshelve(pats, opts):
+ # In non-bare shelve we don't store newly created branch
+ # at bundled commit
+ repo.dirstate.setbranch(repo['.'].branch())
+
+ commitfunc = getcommitfunc(extra, interactive, editor=True)
+ if not interactive:
+ node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
+ else:
+ node = cmdutil.dorecord(ui, repo, commitfunc, None,
+ False, cmdutil.recordfilter, *pats,
+ **pycompat.strkwargs(opts))
+ if not node:
+ _nothingtoshelvemessaging(ui, repo, pats, opts)
+ return 1
+
+ # Create a matcher so that prefetch doesn't attempt to fetch
+ # the entire repository pointlessly, and as an optimisation
+ # for movedirstate, if needed.
+ match = scmutil.matchfiles(repo, repo[node].files())
+ _shelvecreatedcommit(repo, node, name, match)
+
+ if ui.formatted():
+ desc = stringutil.ellipsis(desc, ui.termwidth())
+ ui.status(_('shelved as %s\n') % name)
+ if opts['keep']:
+ with repo.dirstate.parentchange():
+ scmutil.movedirstate(repo, parent, match)
+ else:
+ hg.update(repo, parent.node())
+ if origbranch != repo['.'].branch() and not _isbareshelve(pats, opts):
+ repo.dirstate.setbranch(origbranch)
+
+ _finishshelve(repo, tr)
+ finally:
+ _restoreactivebookmark(repo, activebookmark)
+ lockmod.release(tr, lock)
+
+def _isbareshelve(pats, opts):
+ return (not pats
+ and not opts.get('interactive', False)
+ and not opts.get('include', False)
+ and not opts.get('exclude', False))
+
+def _iswctxonnewbranch(repo):
+ return repo[None].branch() != repo['.'].branch()
+
+def cleanupcmd(ui, repo):
+ """subcommand that deletes all shelves"""
+
+ with repo.wlock():
+ for (name, _type) in repo.vfs.readdir(shelvedir):
+ suffix = name.rsplit('.', 1)[-1]
+ if suffix in shelvefileextensions:
+ shelvedfile(repo, name).movetobackup()
+ cleanupoldbackups(repo)
+
+def deletecmd(ui, repo, pats):
+ """subcommand that deletes a specific shelve"""
+ if not pats:
+ raise error.Abort(_('no shelved changes specified!'))
+ with repo.wlock():
+ try:
+ for name in pats:
+ for suffix in shelvefileextensions:
+ shfile = shelvedfile(repo, name, suffix)
+ # patch file is necessary, as it should
+ # be present for any kind of shelve,
+ # but the .hg file is optional as in future we
+ # will add obsolete shelve which does not create a
+ # bundle
+ if shfile.exists() or suffix == patchextension:
+ shfile.movetobackup()
+ cleanupoldbackups(repo)
+ except OSError as err:
+ if err.errno != errno.ENOENT:
+ raise
+ raise error.Abort(_("shelved change '%s' not found") % name)
+
+def listshelves(repo):
+ """return all shelves in repo as list of (time, filename)"""
+ try:
+ names = repo.vfs.readdir(shelvedir)
+ except OSError as err:
+ if err.errno != errno.ENOENT:
+ raise
+ return []
+ info = []
+ for (name, _type) in names:
+ pfx, sfx = name.rsplit('.', 1)
+ if not pfx or sfx != patchextension:
+ continue
+ st = shelvedfile(repo, name).stat()
+ info.append((st[stat.ST_MTIME], shelvedfile(repo, pfx).filename()))
+ return sorted(info, reverse=True)
+
+def listcmd(ui, repo, pats, opts):
+ """subcommand that displays the list of shelves"""
+ pats = set(pats)
+ width = 80
+ if not ui.plain():
+ width = ui.termwidth()
+ namelabel = 'shelve.newest'
+ ui.pager('shelve')
+ for mtime, name in listshelves(repo):
+ sname = util.split(name)[1]
+ if pats and sname not in pats:
+ continue
+ ui.write(sname, label=namelabel)
+ namelabel = 'shelve.name'
+ if ui.quiet:
+ ui.write('\n')
+ continue
+ ui.write(' ' * (16 - len(sname)))
+ used = 16
+ date = dateutil.makedate(mtime)
+ age = '(%s)' % templatefilters.age(date, abbrev=True)
+ ui.write(age, label='shelve.age')
+ ui.write(' ' * (12 - len(age)))
+ used += 12
+ with open(name + '.' + patchextension, 'rb') as fp:
+ while True:
+ line = fp.readline()
+ if not line:
+ break
+ if not line.startswith('#'):
+ desc = line.rstrip()
+ if ui.formatted():
+ desc = stringutil.ellipsis(desc, width - used)
+ ui.write(desc)
+ break
+ ui.write('\n')
+ if not (opts['patch'] or opts['stat']):
+ continue
+ difflines = fp.readlines()
+ if opts['patch']:
+ for chunk, label in patch.difflabel(iter, difflines):
+ ui.write(chunk, label=label)
+ if opts['stat']:
+ for chunk, label in patch.diffstatui(difflines, width=width):
+ ui.write(chunk, label=label)
+
+def patchcmds(ui, repo, pats, opts):
+ """subcommand that displays shelves"""
+ if len(pats) == 0:
+ shelves = listshelves(repo)
+ if not shelves:
+ raise error.Abort(_("there are no shelves to show"))
+ mtime, name = shelves[0]
+ sname = util.split(name)[1]
+ pats = [sname]
+
+ for shelfname in pats:
+ if not shelvedfile(repo, shelfname, patchextension).exists():
+ raise error.Abort(_("cannot find shelf %s") % shelfname)
+
+ listcmd(ui, repo, pats, opts)
+
+def checkparents(repo, state):
+ """check parent while resuming an unshelve"""
+ if state.parents != repo.dirstate.parents():
+ raise error.Abort(_('working directory parents do not match unshelve '
+ 'state'))
+
+def _loadshelvedstate(ui, repo, opts):
+ try:
+ state = shelvedstate.load(repo)
+ if opts.get('keep') is None:
+ opts['keep'] = state.keep
+ except IOError as err:
+ if err.errno != errno.ENOENT:
+ raise
+ cmdutil.wrongtooltocontinue(repo, _('unshelve'))
+ except error.CorruptedState as err:
+ ui.debug(pycompat.bytestr(err) + '\n')
+ if opts.get('continue'):
+ msg = _('corrupted shelved state file')
+ hint = _('please run hg unshelve --abort to abort unshelve '
+ 'operation')
+ raise error.Abort(msg, hint=hint)
+ elif opts.get('abort'):
+ shelvedstate.clear(repo)
+ raise error.Abort(_('could not read shelved state file, your '
+ 'working copy may be in an unexpected state\n'
+ 'please update to some commit\n'))
+ return state
+
+def unshelveabort(ui, repo, state):
+ """subcommand that aborts an in-progress unshelve"""
+ with repo.lock():
+ try:
+ checkparents(repo, state)
+
+ merge.update(repo, state.pendingctx, branchmerge=False, force=True)
+ if (state.activebookmark
+ and state.activebookmark in repo._bookmarks):
+ bookmarks.activate(repo, state.activebookmark)
+ mergefiles(ui, repo, state.wctx, state.pendingctx)
+ if not phases.supportinternal(repo):
+ repair.strip(ui, repo, state.nodestoremove, backup=False,
+ topic='shelve')
+ finally:
+ shelvedstate.clear(repo)
+ ui.warn(_("unshelve of '%s' aborted\n") % state.name)
+
+def hgabortunshelve(ui, repo):
+ """logic to abort unshelve using 'hg abort'"""
+ with repo.wlock():
+ state = _loadshelvedstate(ui, repo, {'abort' : True})
+ return unshelveabort(ui, repo, state)
+
+def mergefiles(ui, repo, wctx, shelvectx):
+ """updates to wctx and merges the changes from shelvectx into the
+ dirstate."""
+ with ui.configoverride({('ui', 'quiet'): True}):
+ hg.update(repo, wctx.node())
+ ui.pushbuffer(True)
+ cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents())
+ ui.popbuffer()
+
+def restorebranch(ui, repo, branchtorestore):
+ if branchtorestore and branchtorestore != repo.dirstate.branch():
+ repo.dirstate.setbranch(branchtorestore)
+ ui.status(_('marked working directory as branch %s\n')
+ % branchtorestore)
+
+def unshelvecleanup(ui, repo, name, opts):
+ """remove related files after an unshelve"""
+ if not opts.get('keep'):
+ for filetype in shelvefileextensions:
+ shfile = shelvedfile(repo, name, filetype)
+ if shfile.exists():
+ shfile.movetobackup()
+ cleanupoldbackups(repo)
+def unshelvecontinue(ui, repo, state, opts, basename=None):
+ """subcommand to continue an in-progress unshelve"""
+ # We're finishing off a merge. First parent is our original
+ # parent, second is the temporary "fake" commit we're unshelving.
+ interactive = opts.get('interactive')
+ with repo.lock():
+ checkparents(repo, state)
+ ms = merge.mergestate.read(repo)
+ if list(ms.unresolved()):
+ raise error.Abort(
+ _("unresolved conflicts, can't continue"),
+ hint=_("see 'hg resolve', then 'hg unshelve --continue'"))
+
+ shelvectx = repo[state.parents[1]]
+ pendingctx = state.pendingctx
+
+ with repo.dirstate.parentchange():
+ repo.setparents(state.pendingctx.node(), nodemod.nullid)
+ repo.dirstate.write(repo.currenttransaction())
+
+ targetphase = phases.internal
+ if not phases.supportinternal(repo):
+ targetphase = phases.secret
+ overrides = {('phases', 'new-commit'): targetphase}
+ with repo.ui.configoverride(overrides, 'unshelve'):
+ with repo.dirstate.parentchange():
+ repo.setparents(state.parents[0], nodemod.nullid)
+ if not interactive:
+ ispartialunshelve = False
+ newnode = repo.commit(text=shelvectx.description(),
+ extra=shelvectx.extra(),
+ user=shelvectx.user(),
+ date=shelvectx.date())
+ else:
+ newnode, ispartialunshelve = _dounshelveinteractive(ui,
+ repo, shelvectx, basename, opts)
+
+ if newnode is None:
+ # If it ended up being a no-op commit, then the normal
+ # merge state clean-up path doesn't happen, so do it
+ # here. Fix issue5494
+ merge.mergestate.clean(repo)
+ shelvectx = state.pendingctx
+ msg = _('note: unshelved changes already existed '
+ 'in the working copy\n')
+ ui.status(msg)
+ else:
+ # only strip the shelvectx if we produced one
+ state.nodestoremove.append(newnode)
+ shelvectx = repo[newnode]
+
+ hg.updaterepo(repo, pendingctx.node(), overwrite=False)
+ mergefiles(ui, repo, state.wctx, shelvectx)
+ restorebranch(ui, repo, state.branchtorestore)
+
+ if not ispartialunshelve:
+ if not phases.supportinternal(repo):
+ repair.strip(ui, repo, state.nodestoremove, backup=False,
+ topic='shelve')
+ shelvedstate.clear(repo)
+ unshelvecleanup(ui, repo, state.name, opts)
+ _restoreactivebookmark(repo, state.activebookmark)
+ ui.status(_("unshelve of '%s' complete\n") % state.name)
+
+def hgcontinueunshelve(ui, repo):
+ """logic to resume unshelve using 'hg continue'"""
+ with repo.wlock():
+ state = _loadshelvedstate(ui, repo, {'continue' : True})
+ return unshelvecontinue(ui, repo, state, {'keep' : state.keep})
+
+def _commitworkingcopychanges(ui, repo, opts, tmpwctx):
+ """Temporarily commit working copy changes before moving unshelve commit"""
+ # Store pending changes in a commit and remember added in case a shelve
+ # contains unknown files that are part of the pending change
+ s = repo.status()
+ addedbefore = frozenset(s.added)
+ if not (s.modified or s.added or s.removed):
+ return tmpwctx, addedbefore
+ ui.status(_("temporarily committing pending changes "
+ "(restore with 'hg unshelve --abort')\n"))
+ extra = {'internal': 'shelve'}
+ commitfunc = getcommitfunc(extra=extra, interactive=False,
+ editor=False)
+ tempopts = {}
+ tempopts['message'] = "pending changes temporary commit"
+ tempopts['date'] = opts.get('date')
+ with ui.configoverride({('ui', 'quiet'): True}):
+ node = cmdutil.commit(ui, repo, commitfunc, [], tempopts)
+ tmpwctx = repo[node]
+ return tmpwctx, addedbefore
+
+def _unshelverestorecommit(ui, repo, tr, basename):
+ """Recreate commit in the repository during the unshelve"""
+ repo = repo.unfiltered()
+ node = None
+ if shelvedfile(repo, basename, 'shelve').exists():
+ node = shelvedfile(repo, basename, 'shelve').readinfo()['node']
+ if node is None or node not in repo:
+ with ui.configoverride({('ui', 'quiet'): True}):
+ shelvectx = shelvedfile(repo, basename, 'hg').applybundle(tr)
+ # We might not strip the unbundled changeset, so we should keep track of
+ # the unshelve node in case we need to reuse it (eg: unshelve --keep)
+ if node is None:
+ info = {'node': nodemod.hex(shelvectx.node())}
+ shelvedfile(repo, basename, 'shelve').writeinfo(info)
+ else:
+ shelvectx = repo[node]
+
+ return repo, shelvectx
+
+def _dounshelveinteractive(ui, repo, shelvectx, basename, opts):
+ """The user might want to unshelve certain changes only from the stored
+ shelve. So, we would create two commits. One with requested changes to
+ unshelve at that time and the latter is shelved for future.
+ """
+ opts['message'] = shelvectx.description()
+ opts['interactive-unshelve'] = True
+ pats = []
+ commitfunc = getcommitfunc(shelvectx.extra(), interactive=True,
+ editor=True)
+ newnode = cmdutil.dorecord(ui, repo, commitfunc, None, False,
+ cmdutil.recordfilter, *pats,
+ **pycompat.strkwargs(opts))
+ snode = repo.commit(text=shelvectx.description(),
+ extra=shelvectx.extra(),
+ user=shelvectx.user(),
+ date=shelvectx.date())
+ m = scmutil.matchfiles(repo, repo[snode].files())
+ if snode:
+ _shelvecreatedcommit(repo, snode, basename, m)
+
+ return newnode, bool(snode)
+
+def _rebaserestoredcommit(ui, repo, opts, tr, oldtiprev, basename, pctx,
+ tmpwctx, shelvectx, branchtorestore,
+ activebookmark):
+ """Rebase restored commit from its original location to a destination"""
+ # If the shelve is not immediately on top of the commit
+ # we'll be merging with, rebase it to be on top.
+ interactive = opts.get('interactive')
+ if tmpwctx.node() == shelvectx.p1().node() and not interactive:
+ # We won't skip on interactive mode because, the user might want to
+ # unshelve certain changes only.
+ return shelvectx, False
+
+ overrides = {
+ ('ui', 'forcemerge'): opts.get('tool', ''),
+ ('phases', 'new-commit'): phases.secret,
+ }
+ with repo.ui.configoverride(overrides, 'unshelve'):
+ ui.status(_('rebasing shelved changes\n'))
+ stats = merge.graft(repo, shelvectx, shelvectx.p1(),
+ labels=['shelve', 'working-copy'],
+ keepconflictparent=True)
+ if stats.unresolvedcount:
+ tr.close()
+
+ nodestoremove = [repo.changelog.node(rev)
+ for rev in pycompat.xrange(oldtiprev, len(repo))]
+ shelvedstate.save(repo, basename, pctx, tmpwctx, nodestoremove,
+ branchtorestore, opts.get('keep'), activebookmark)
+ raise error.InterventionRequired(
+ _("unresolved conflicts (see 'hg resolve', then "
+ "'hg unshelve --continue')"))
+
+ with repo.dirstate.parentchange():
+ repo.setparents(tmpwctx.node(), nodemod.nullid)
+ if not interactive:
+ ispartialunshelve = False
+ newnode = repo.commit(text=shelvectx.description(),
+ extra=shelvectx.extra(),
+ user=shelvectx.user(),
+ date=shelvectx.date())
+ else:
+ newnode, ispartialunshelve = _dounshelveinteractive(ui, repo,
+ shelvectx, basename, opts)
+
+ if newnode is None:
+ # If it ended up being a no-op commit, then the normal
+ # merge state clean-up path doesn't happen, so do it
+ # here. Fix issue5494
+ merge.mergestate.clean(repo)
+ shelvectx = tmpwctx
+ msg = _('note: unshelved changes already existed '
+ 'in the working copy\n')
+ ui.status(msg)
+ else:
+ shelvectx = repo[newnode]
+ hg.updaterepo(repo, tmpwctx.node(), False)
+
+ return shelvectx, ispartialunshelve
+
+def _forgetunknownfiles(repo, shelvectx, addedbefore):
+ # Forget any files that were unknown before the shelve, unknown before
+ # unshelve started, but are now added.
+ shelveunknown = shelvectx.extra().get('shelve_unknown')
+ if not shelveunknown:
+ return
+ shelveunknown = frozenset(shelveunknown.split('\0'))
+ addedafter = frozenset(repo.status().added)
+ toforget = (addedafter & shelveunknown) - addedbefore
+ repo[None].forget(toforget)
+
+def _finishunshelve(repo, oldtiprev, tr, activebookmark):
+ _restoreactivebookmark(repo, activebookmark)
+ # The transaction aborting will strip all the commits for us,
+ # but it doesn't update the inmemory structures, so addchangegroup
+ # hooks still fire and try to operate on the missing commits.
+ # Clean up manually to prevent this.
+ repo.unfiltered().changelog.strip(oldtiprev, tr)
+ _aborttransaction(repo, tr)
+
+def _checkunshelveuntrackedproblems(ui, repo, shelvectx):
+ """Check potential problems which may result from working
+ copy having untracked changes."""
+ wcdeleted = set(repo.status().deleted)
+ shelvetouched = set(shelvectx.files())
+ intersection = wcdeleted.intersection(shelvetouched)
+ if intersection:
+ m = _("shelved change touches missing files")
+ hint = _("run hg status to see which files are missing")
+ raise error.Abort(m, hint=hint)
+
+def dounshelve(ui, repo, *shelved, **opts):
+ opts = pycompat.byteskwargs(opts)
+ abortf = opts.get('abort')
+ continuef = opts.get('continue')
+ interactive = opts.get('interactive')
+ if not abortf and not continuef:
+ cmdutil.checkunfinished(repo)
+ shelved = list(shelved)
+ if opts.get("name"):
+ shelved.append(opts["name"])
+
+ if abortf or continuef and not interactive:
+ if abortf and continuef:
+ raise error.Abort(_('cannot use both abort and continue'))
+ if shelved:
+ raise error.Abort(_('cannot combine abort/continue with '
+ 'naming a shelved change'))
+ if abortf and opts.get('tool', False):
+ ui.warn(_('tool option will be ignored\n'))
+
+ state = _loadshelvedstate(ui, repo, opts)
+ if abortf:
+ return unshelveabort(ui, repo, state)
+ elif continuef:
+ return unshelvecontinue(ui, repo, state, opts)
+ elif len(shelved) > 1:
+ raise error.Abort(_('can only unshelve one change at a time'))
+ elif not shelved:
+ shelved = listshelves(repo)
+ if not shelved:
+ raise error.Abort(_('no shelved changes to apply!'))
+ basename = util.split(shelved[0][1])[1]
+ ui.status(_("unshelving change '%s'\n") % basename)
+ elif shelved:
+ basename = shelved[0]
+ if continuef and interactive:
+ state = _loadshelvedstate(ui, repo, opts)
+ return unshelvecontinue(ui, repo, state, opts, basename)
+
+ if not shelvedfile(repo, basename, patchextension).exists():
+ raise error.Abort(_("shelved change '%s' not found") % basename)
+
+ repo = repo.unfiltered()
+ lock = tr = None
+ try:
+ lock = repo.lock()
+ tr = repo.transaction('unshelve', report=lambda x: None)
+ oldtiprev = len(repo)
+
+ pctx = repo['.']
+ tmpwctx = pctx
+ # The goal is to have a commit structure like so:
+ # ...-> pctx -> tmpwctx -> shelvectx
+ # where tmpwctx is an optional commit with the user's pending changes
+ # and shelvectx is the unshelved changes. Then we merge it all down
+ # to the original pctx.
+
+ activebookmark = _backupactivebookmark(repo)
+ tmpwctx, addedbefore = _commitworkingcopychanges(ui, repo, opts,
+ tmpwctx)
+ repo, shelvectx = _unshelverestorecommit(ui, repo, tr, basename)
+ _checkunshelveuntrackedproblems(ui, repo, shelvectx)
+ branchtorestore = ''
+ if shelvectx.branch() != shelvectx.p1().branch():
+ branchtorestore = shelvectx.branch()
+
+ shelvectx, ispartialunshelve = _rebaserestoredcommit(ui, repo, opts,
+ tr, oldtiprev, basename, pctx, tmpwctx, shelvectx,
+ branchtorestore, activebookmark)
+ overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
+ with ui.configoverride(overrides, 'unshelve'):
+ mergefiles(ui, repo, pctx, shelvectx)
+ restorebranch(ui, repo, branchtorestore)
+ if not ispartialunshelve:
+ _forgetunknownfiles(repo, shelvectx, addedbefore)
+
+ shelvedstate.clear(repo)
+ _finishunshelve(repo, oldtiprev, tr, activebookmark)
+ unshelvecleanup(ui, repo, basename, opts)
+ finally:
+ if tr:
+ tr.release()
+ lockmod.release(lock)
--- a/mercurial/sparse.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/sparse.py Mon Jul 22 14:00:33 2019 -0400
@@ -248,7 +248,8 @@
typeactions = mergemod.emptyactions()
typeactions['r'] = actions
- mergemod.applyupdates(repo, typeactions, repo[None], repo['.'], False)
+ mergemod.applyupdates(repo, typeactions, repo[None], repo['.'], False,
+ wantfiledata=False)
# Fix dirstate
for file in dropped:
@@ -382,7 +383,7 @@
typeactions = mergemod.emptyactions()
typeactions['g'] = actions
mergemod.applyupdates(repo, typeactions, repo[None], repo['.'],
- False)
+ False, wantfiledata=False)
dirstate = repo.dirstate
for file, flags, msg in actions:
@@ -486,7 +487,8 @@
for f, (m, args, msg) in actions.iteritems():
typeactions[m].append((f, args, msg))
- mergemod.applyupdates(repo, typeactions, repo[None], repo['.'], False)
+ mergemod.applyupdates(repo, typeactions, repo[None], repo['.'], False,
+ wantfiledata=False)
# Fix dirstate
for file in added:
--- a/mercurial/sslutil.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/sslutil.py Mon Jul 22 14:00:33 2019 -0400
@@ -16,6 +16,7 @@
from .i18n import _
from . import (
+ encoding,
error,
node,
pycompat,
@@ -348,6 +349,17 @@
if not serverhostname:
raise error.Abort(_('serverhostname argument is required'))
+ if b'SSLKEYLOGFILE' in encoding.environ:
+ try:
+ import sslkeylog
+ sslkeylog.set_keylog(pycompat.fsdecode(
+ encoding.environ[b'SSLKEYLOGFILE']))
+ ui.warn(
+ b'sslkeylog enabled by SSLKEYLOGFILE environment variable\n')
+ except ImportError:
+ ui.warn(b'sslkeylog module missing, '
+ b'but SSLKEYLOGFILE set in environment\n')
+
for f in (keyfile, certfile):
if f and not os.path.exists(f):
raise error.Abort(
--- a/mercurial/state.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/state.py Mon Jul 22 14:00:33 2019 -0400
@@ -19,6 +19,8 @@
from __future__ import absolute_import
+from .i18n import _
+
from . import (
error,
util,
@@ -85,3 +87,134 @@
def exists(self):
"""check whether the state file exists or not"""
return self._repo.vfs.exists(self.fname)
+
+class _statecheck(object):
+ """a utility class that deals with multistep operations like graft,
+ histedit, bisect, update etc and check whether such commands
+ are in an unfinished condition or not and return appropriate message
+ and hint.
+ It also has the ability to register and determine the states of any new
+ multistep operation or multistep command extension.
+ """
+
+ def __init__(self, opname, fname, clearable, allowcommit, reportonly,
+ continueflag, stopflag, cmdmsg, cmdhint, statushint,
+ abortfunc, continuefunc):
+ self._opname = opname
+ self._fname = fname
+ self._clearable = clearable
+ self._allowcommit = allowcommit
+ self._reportonly = reportonly
+ self._continueflag = continueflag
+ self._stopflag = stopflag
+ self._cmdmsg = cmdmsg
+ self._cmdhint = cmdhint
+ self._statushint = statushint
+ self.abortfunc = abortfunc
+ self.continuefunc = continuefunc
+
+ def statusmsg(self):
+ """returns the hint message corresponding to the command for
+ hg status --verbose
+ """
+ if not self._statushint:
+ hint = (_('To continue: hg %s --continue\n'
+ 'To abort: hg %s --abort') % (self._opname,
+ self._opname))
+ if self._stopflag:
+ hint = hint + (_('\nTo stop: hg %s --stop') %
+ (self._opname))
+ return hint
+ return self._statushint
+
+ def hint(self):
+ """returns the hint message corresponding to an interrupted
+ operation
+ """
+ if not self._cmdhint:
+ return (_("use 'hg %s --continue' or 'hg %s --abort'") %
+ (self._opname, self._opname))
+ return self._cmdhint
+
+ def msg(self):
+ """returns the status message corresponding to the command"""
+ if not self._cmdmsg:
+ return _('%s in progress') % (self._opname)
+ return self._cmdmsg
+
+ def continuemsg(self):
+ """ returns appropriate continue message corresponding to command"""
+ return _('hg %s --continue') % (self._opname)
+
+ def isunfinished(self, repo):
+ """determines whether a multi-step operation is in progress
+ or not
+ """
+ if self._opname == 'merge':
+ return len(repo[None].parents()) > 1
+ else:
+ return repo.vfs.exists(self._fname)
+
+# A list of statecheck objects for multistep operations like graft.
+_unfinishedstates = []
+
+def addunfinished(opname, fname, clearable=False, allowcommit=False,
+ reportonly=False, continueflag=False, stopflag=False,
+ cmdmsg="", cmdhint="", statushint="", abortfunc=None,
+ continuefunc=None):
+ """this registers a new command or operation to unfinishedstates
+ opname is the name of the command or operation
+ fname is the file name in which data should be stored in .hg directory.
+ It is None for merge command.
+ clearable boolean determines whether or not interrupted states can be
+ cleared by running `hg update -C .` which in turn deletes the
+ state file.
+ allowcommit boolean decides whether commit is allowed during interrupted
+ state or not.
+ reportonly flag is used for operations like bisect where we just
+ need to detect the operation using 'hg status --verbose'
+ continueflag is a boolean that determines whether or not a command
+ supports the `--continue` option.
+ stopflag is a boolean that determines whether or not a command supports
+ --stop flag
+ cmdmsg is used to pass a different status message in case standard
+ message of the format "abort: cmdname in progress" is not desired.
+ cmdhint is used to pass a different hint message in case standard
+ message of the format "To continue: hg cmdname --continue
+ To abort: hg cmdname --abort" is not desired.
+ statushint is used to pass a different status message in case standard
+ message of the format ('To continue: hg cmdname --continue'
+ 'To abort: hg cmdname --abort') is not desired
+ abortfunc stores the function required to abort an unfinished state.
+ continuefunc stores the function required to finish an interrupted
+ operation.
+ """
+ statecheckobj = _statecheck(opname, fname, clearable, allowcommit,
+ reportonly, continueflag, stopflag, cmdmsg,
+ cmdhint, statushint, abortfunc, continuefunc)
+ if opname == 'merge':
+ _unfinishedstates.append(statecheckobj)
+ else:
+ _unfinishedstates.insert(0, statecheckobj)
+
+addunfinished(
+ 'update', fname='updatestate', clearable=True,
+ cmdmsg=_('last update was interrupted'),
+ cmdhint=_("use 'hg update' to get a consistent checkout"),
+ statushint=_("To continue: hg update")
+)
+addunfinished(
+ 'bisect', fname='bisect.state', allowcommit=True, reportonly=True,
+ statushint=_('To mark the changeset good: hg bisect --good\n'
+ 'To mark the changeset bad: hg bisect --bad\n'
+ 'To abort: hg bisect --reset\n')
+)
+
+def getrepostate(repo):
+ # experimental config: commands.status.skipstates
+ skip = set(repo.ui.configlist('commands', 'status.skipstates'))
+ for state in _unfinishedstates:
+ if state._opname in skip:
+ continue
+ if state.isunfinished(repo):
+ return (state._opname, state.statusmsg())
--- a/mercurial/statichttprepo.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/statichttprepo.py Mon Jul 22 14:00:33 2019 -0400
@@ -155,6 +155,7 @@
self.names = namespaces.namespaces()
self.filtername = None
+ self._extrafilterid = None
try:
requirements = set(self.vfs.read(b'requires').splitlines())
--- a/mercurial/statprof.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/statprof.py Mon Jul 22 14:00:33 2019 -0400
@@ -678,6 +678,7 @@
for sample in data.samples:
root.add(sample.stack[::-1], sample.time - lasttime)
lasttime = sample.time
+ showtime = kwargs.get(r'showtime', True)
def _write(node, depth, multiple_siblings):
site = node.site
@@ -695,7 +696,9 @@
# lots of string formatting
listpattern = ''.ljust(indent) +\
('\\' if multiple_siblings else '|') +\
- ' %4.1f%% %s %s'
+ ' %4.1f%%' +\
+ (' %5.2fs' % node.count if showtime else '') +\
+ ' %s %s'
liststring = listpattern % (node.count / root.count * 100,
filename, function)
codepattern = '%' + ('%d' % (55 - len(liststring))) + 's %d: %s'
--- a/mercurial/store.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/store.py Mon Jul 22 14:00:33 2019 -0400
@@ -40,7 +40,7 @@
if path.startswith('data/'):
return matcher(path[len('data/'):-len('.i')])
elif path.startswith('meta/'):
- return matcher.visitdir(path[len('meta/'):-len('/00manifest.i')] or '.')
+ return matcher.visitdir(path[len('meta/'):-len('/00manifest.i')])
raise error.ProgrammingError("cannot decode path %s" % path)
@@ -337,7 +337,7 @@
mode = None
return mode
-_data = ('narrowspec data meta 00manifest.d 00manifest.i'
+_data = ('bookmarks narrowspec data meta 00manifest.d 00manifest.i'
' 00changelog.d 00changelog.i phaseroots obsstore')
def isrevlog(f, kind, st):
@@ -612,7 +612,7 @@
raise
def copylist(self):
- d = ('narrowspec data meta dh fncache phaseroots obsstore'
+ d = ('bookmarks narrowspec data meta dh fncache phaseroots obsstore'
' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
return (['requires', '00changelog.i'] +
['store/' + f for f in d.split()])
--- a/mercurial/subrepo.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/subrepo.py Mon Jul 22 14:00:33 2019 -0400
@@ -88,13 +88,15 @@
def _updateprompt(ui, sub, dirty, local, remote):
if dirty:
msg = (_(' subrepository sources for %s differ\n'
- 'use (l)ocal source (%s) or (r)emote source (%s)?'
+ 'you can use (l)ocal source (%s) or (r)emote source (%s).\n'
+ 'what do you want to do?'
'$$ &Local $$ &Remote')
% (subrelpath(sub), local, remote))
else:
msg = (_(' subrepository sources for %s differ (in checked out '
'version)\n'
- 'use (l)ocal source (%s) or (r)emote source (%s)?'
+ 'you can use (l)ocal source (%s) or (r)emote source (%s).\n'
+ 'what do you want to do?'
'$$ &Local $$ &Remote')
% (subrelpath(sub), local, remote))
return ui.promptchoice(msg, 0)
--- a/mercurial/subrepoutil.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/subrepoutil.py Mon Jul 22 14:00:33 2019 -0400
@@ -168,8 +168,9 @@
prompts['ro'] = r[0]
if repo.ui.promptchoice(
_(' subrepository sources for %(s)s differ\n'
- 'use (l)ocal%(l)s source (%(lo)s)'
- ' or (r)emote%(o)s source (%(ro)s)?'
+ 'you can use (l)ocal%(l)s source (%(lo)s)'
+ ' or (r)emote%(o)s source (%(ro)s).\n'
+ 'what do you want to do?'
'$$ &Local $$ &Remote') % prompts, 0):
debug(s, "prompt changed, get", r)
wctx.sub(s).get(r, overwrite)
@@ -186,7 +187,9 @@
option = repo.ui.promptchoice(
_(' subrepository %(s)s diverged (local revision: %(sl)s, '
'remote revision: %(sr)s)\n'
- '(M)erge, keep (l)ocal%(l)s or keep (r)emote%(o)s?'
+ 'you can (m)erge, keep (l)ocal%(l)s or keep '
+ '(r)emote%(o)s.\n'
+ 'what do you want to do?'
'$$ &Merge $$ &Local $$ &Remote')
% prompts, 0)
if option == 0:
--- a/mercurial/tags.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/tags.py Mon Jul 22 14:00:33 2019 -0400
@@ -13,11 +13,13 @@
from __future__ import absolute_import
import errno
+import io
from .node import (
bin,
hex,
nullid,
+ nullrev,
short,
)
from .i18n import _
@@ -89,7 +91,7 @@
unfi = repo.unfiltered()
tonode = unfi.changelog.node
nodes = [tonode(r) for r in revs]
- fnodes = _getfnodes(ui, repo, nodes[::-1]) # reversed help the cache
+ fnodes = _getfnodes(ui, repo, nodes)
fnodes = _filterfnodes(fnodes, nodes)
return fnodes
@@ -457,7 +459,8 @@
# This is the most expensive part of finding tags, so performance
# depends primarily on the size of newheads. Worst case: no cache
# file, so newheads == repoheads.
- cachefnode = _getfnodes(ui, repo, repoheads)
+ # Reversed order helps the cache ('repoheads' is in descending order)
+ cachefnode = _getfnodes(ui, repo, reversed(repoheads))
# Caller has to iterate over all heads, but can use the filenodes in
# cachefnode to get to each .hgtags revision quickly.
@@ -472,7 +475,7 @@
starttime = util.timer()
fnodescache = hgtagsfnodescache(repo.unfiltered())
cachefnode = {}
- for node in reversed(nodes):
+ for node in nodes:
fnode = fnodescache.getfnode(node)
if fnode != nullid:
cachefnode[node] = fnode
@@ -560,7 +563,7 @@
" branch name\n") % name)
def writetags(fp, names, munge, prevtags):
- fp.seek(0, 2)
+ fp.seek(0, io.SEEK_END)
if prevtags and not prevtags.endswith('\n'):
fp.write('\n')
for name in names:
@@ -691,6 +694,9 @@
If an .hgtags does not exist at the specified revision, nullid is
returned.
"""
+ if node == nullid:
+ return nullid
+
ctx = self._repo[node]
rev = ctx.rev()
@@ -715,12 +721,33 @@
if not computemissing:
return None
- # Populate missing entry.
- try:
- fnode = ctx.filenode('.hgtags')
- except error.LookupError:
- # No .hgtags file on this revision.
- fnode = nullid
+ fnode = None
+ cl = self._repo.changelog
+ p1rev, p2rev = cl._uncheckedparentrevs(rev)
+ p1node = cl.node(p1rev)
+ p1fnode = self.getfnode(p1node, computemissing=False)
+ if p2rev != nullrev:
+ # There is some no-merge changeset where p1 is null and p2 is set
+ # Processing them as merge is just slower, but still gives a good
+ # result.
+ p2node = cl.node(p2rev)
+ p2fnode = self.getfnode(p2node, computemissing=False)
+ if p1fnode != p2fnode:
+ # we cannot rely on readfast because we don't know against what
+ # parent the readfast delta is computed
+ p1fnode = None
+ if p1fnode is not None:
+ mctx = ctx.manifestctx()
+ fnode = mctx.readfast().get('.hgtags')
+ if fnode is None:
+ fnode = p1fnode
+ if fnode is None:
+ # Populate missing entry.
+ try:
+ fnode = ctx.filenode('.hgtags')
+ except error.LookupError:
+ # No .hgtags file on this revision.
+ fnode = nullid
self._writeentry(offset, properprefix, fnode)
return fnode
--- a/mercurial/templatekw.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/templatekw.py Mon Jul 22 14:00:33 2019 -0400
@@ -290,16 +290,6 @@
statmap.update((f, char) for f in files)
return revcache['filestatusmap'] # {path: statchar}
-def _showfilesbystat(context, mapping, name, index):
- stat = _getfilestatus(context, mapping)
- files = stat[index]
- return templateutil.compatfileslist(context, mapping, name, files)
-
-@templatekeyword('file_adds', requires={'ctx', 'revcache'})
-def showfileadds(context, mapping):
- """List of strings. Files added by this changeset."""
- return _showfilesbystat(context, mapping, 'file_add', 1)
-
@templatekeyword('file_copies',
requires={'repo', 'ctx', 'cache', 'revcache'})
def showfilecopies(context, mapping):
@@ -311,14 +301,10 @@
cache = context.resource(mapping, 'cache')
copies = context.resource(mapping, 'revcache').get('copies')
if copies is None:
- if 'getrenamed' not in cache:
- cache['getrenamed'] = scmutil.getrenamedfn(repo)
- copies = []
- getrenamed = cache['getrenamed']
- for fn in ctx.files():
- rename = getrenamed(fn, ctx.rev())
- if rename:
- copies.append((fn, rename))
+ if 'getcopies' not in cache:
+ cache['getcopies'] = scmutil.getcopiesfn(repo)
+ getcopies = cache['getcopies']
+ copies = getcopies(ctx)
return templateutil.compatfilecopiesdict(context, mapping, 'file_copy',
copies)
@@ -334,15 +320,26 @@
return templateutil.compatfilecopiesdict(context, mapping, 'file_copy',
copies)
+@templatekeyword('file_adds', requires={'ctx', 'revcache'})
+def showfileadds(context, mapping):
+ """List of strings. Files added by this changeset."""
+ ctx = context.resource(mapping, 'ctx')
+ return templateutil.compatfileslist(context, mapping, 'file_add',
+ ctx.filesadded())
+
@templatekeyword('file_dels', requires={'ctx', 'revcache'})
def showfiledels(context, mapping):
"""List of strings. Files removed by this changeset."""
- return _showfilesbystat(context, mapping, 'file_del', 2)
+ ctx = context.resource(mapping, 'ctx')
+ return templateutil.compatfileslist(context, mapping, 'file_del',
+ ctx.filesremoved())
@templatekeyword('file_mods', requires={'ctx', 'revcache'})
def showfilemods(context, mapping):
"""List of strings. Files modified by this changeset."""
- return _showfilesbystat(context, mapping, 'file_mod', 0)
+ ctx = context.resource(mapping, 'ctx')
+ return templateutil.compatfileslist(context, mapping, 'file_mod',
+ ctx.filesmodified())
@templatekeyword('files', requires={'ctx'})
def showfiles(context, mapping):
@@ -572,7 +569,7 @@
@templatekeyword("predecessors", requires={'repo', 'ctx'})
def showpredecessors(context, mapping):
- """Returns the list of the closest visible successors. (EXPERIMENTAL)"""
+ """Returns the list of the closest visible predecessors. (EXPERIMENTAL)"""
repo = context.resource(mapping, 'repo')
ctx = context.resource(mapping, 'ctx')
predecessors = sorted(obsutil.closestpredecessors(repo, ctx.node()))
--- a/mercurial/templateutil.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/templateutil.py Mon Jul 22 14:00:33 2019 -0400
@@ -874,7 +874,6 @@
def _recursivesymbolblocker(key):
def showrecursion(context, mapping):
raise error.Abort(_("recursive reference '%s' in template") % key)
- showrecursion._requires = () # mark as new-style templatekw
return showrecursion
def runsymbol(context, mapping, key, default=''):
@@ -888,19 +887,6 @@
v = context.process(key, safemapping)
except TemplateNotFound:
v = default
- if callable(v) and getattr(v, '_requires', None) is None:
- # old templatekw: expand all keywords and resources
- # (TODO: drop support for old-style functions. 'f._requires = ()'
- # can be removed.)
- props = {k: context._resources.lookup(mapping, k)
- for k in context._resources.knownkeys()}
- # pass context to _showcompatlist() through templatekw._showlist()
- props['templ'] = context
- props.update(mapping)
- ui = props.get('ui')
- if ui:
- ui.deprecwarn("old-style template keyword '%s'" % key, '4.8')
- return v(**pycompat.strkwargs(props))
if callable(v):
# new templatekw
try:
--- a/mercurial/ui.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/ui.py Mon Jul 22 14:00:33 2019 -0400
@@ -68,6 +68,8 @@
update.check = noconflict
# Show conflicts information in `hg status`
status.verbose = True
+# Make `hg resolve` with no action (like `-m`) fail instead of re-merging.
+resolve.explicit-re-merge = True
[diff]
git = 1
@@ -102,7 +104,6 @@
#
# histedit =
# rebase =
-# shelve =
# uncommit =
""",
--- a/mercurial/unionrepo.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/unionrepo.py Mon Jul 22 14:00:33 2019 -0400
@@ -128,9 +128,10 @@
def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
raise NotImplementedError
- def addgroup(self, deltas, transaction, addrevisioncb=None):
+ def addgroup(self, deltas, linkmapper, transaction, addrevisioncb=None,
+ maybemissingparents=False):
raise NotImplementedError
- def strip(self, rev, minlink):
+ def strip(self, minlink, transaction):
raise NotImplementedError
def checksize(self):
raise NotImplementedError
--- a/mercurial/util.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/util.py Mon Jul 22 14:00:33 2019 -0400
@@ -53,6 +53,8 @@
stringutil,
)
+rustdirs = policy.importrust('dirstate', 'Dirs')
+
base85 = policy.importmod(r'base85')
osutil = policy.importmod(r'osutil')
parsers = policy.importmod(r'parsers')
@@ -3204,11 +3206,15 @@
if safehasattr(parsers, 'dirs'):
dirs = parsers.dirs
+if rustdirs is not None:
+ dirs = rustdirs
+
def finddirs(path):
pos = path.rfind('/')
while pos != -1:
yield path[:pos]
pos = path.rfind('/', 0, pos)
+ yield ''
# convenient shortcut
--- a/mercurial/utils/procutil.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/utils/procutil.py Mon Jul 22 14:00:33 2019 -0400
@@ -470,7 +470,8 @@
# See https://phab.mercurial-scm.org/D1701 for discussion
_creationflags = DETACHED_PROCESS | subprocess.CREATE_NEW_PROCESS_GROUP
- def runbgcommand(script, env, shell=False, stdout=None, stderr=None):
+ def runbgcommand(
+ script, env, shell=False, stdout=None, stderr=None, ensurestart=True):
'''Spawn a command without waiting for it to finish.'''
# we can't use close_fds *and* redirect stdin. I'm not sure that we
# need to because the detached process has no console connection.
@@ -480,12 +481,15 @@
creationflags=_creationflags, stdout=stdout,
stderr=stderr)
else:
- def runbgcommand(cmd, env, shell=False, stdout=None, stderr=None):
+ def runbgcommand(
+ cmd, env, shell=False, stdout=None, stderr=None, ensurestart=True):
'''Spawn a command without waiting for it to finish.'''
# double-fork to completely detach from the parent process
# based on http://code.activestate.com/recipes/278731
pid = os.fork()
if pid:
+ if not ensurestart:
+ return
# Parent process
(_pid, status) = os.waitpid(pid, 0)
if os.WIFEXITED(status):
--- a/mercurial/verify.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/verify.py Mon Jul 22 14:00:33 2019 -0400
@@ -22,9 +22,13 @@
util,
)
-def verify(repo):
+VERIFY_DEFAULT = 0
+VERIFY_FULL = 1
+
+def verify(repo, level=None):
with repo.lock():
- return verifier(repo).verify()
+ v = verifier(repo, level)
+ return v.verify()
def _normpath(f):
# under hg < 2.4, convert didn't sanitize paths properly, so a
@@ -34,10 +38,13 @@
return f
class verifier(object):
- def __init__(self, repo):
+ def __init__(self, repo, level=None):
self.repo = repo.unfiltered()
self.ui = repo.ui
self.match = repo.narrowmatch()
+ if level is None:
+ level = VERIFY_DEFAULT
+ self._level = level
self.badrevs = set()
self.errors = 0
self.warnings = 0
@@ -90,9 +97,9 @@
d = obj.checksize()
if d[0]:
- self.err(None, _("data length off by %d bytes") % d[0], name)
+ self._err(None, _("data length off by %d bytes") % d[0], name)
if d[1]:
- self.err(None, _("index contains %d extra bytes") % d[1], name)
+ self._err(None, _("index contains %d extra bytes") % d[1], name)
if obj.version != revlog.REVLOGV0:
if not self.revlogv1:
@@ -330,6 +337,16 @@
filenodes.setdefault(fullpath, {}).setdefault(fn, lr)
except Exception as inst:
self._exc(lr, _("reading delta %s") % short(n), inst, label)
+ if self._level >= VERIFY_FULL:
+ try:
+ # Various issues can affect manifest. So we read each full
+ # text from storage. This triggers the checks from the core
+ # code (eg: hash verification, filename are ordered, etc.)
+ mfdelta = mfl.get(dir, n).read()
+ except Exception as inst:
+ self._exc(lr, _("reading full manifest %s") % short(n),
+ inst, label)
+
if not dir:
progress.complete()
--- a/mercurial/wireprototypes.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/wireprototypes.py Mon Jul 22 14:00:33 2019 -0400
@@ -30,7 +30,9 @@
HTTP_WIREPROTO_V2 = 'exp-http-v2-0003'
NARROWCAP = 'exp-narrow-1'
-ELLIPSESCAP = 'exp-ellipses-1'
+ELLIPSESCAP1 = 'exp-ellipses-1'
+ELLIPSESCAP = 'exp-ellipses-2'
+SUPPORTED_ELLIPSESCAP = (ELLIPSESCAP1, ELLIPSESCAP)
# All available wire protocol transports.
TRANSPORTS = {
--- a/mercurial/worker.py Tue Jul 09 10:07:35 2019 -0400
+++ b/mercurial/worker.py Mon Jul 22 14:00:33 2019 -0400
@@ -83,7 +83,8 @@
benefit = linear - (_STARTUP_COST * workers + linear / workers)
return benefit >= 0.15
-def worker(ui, costperarg, func, staticargs, args, threadsafe=True):
+def worker(ui, costperarg, func, staticargs, args, hasretval=False,
+ threadsafe=True):
'''run a function, possibly in parallel in multiple worker
processes.
@@ -91,23 +92,28 @@
costperarg - cost of a single task
- func - function to run
+ func - function to run. It is expected to return a progress iterator.
staticargs - arguments to pass to every invocation of the function
args - arguments to split into chunks, to pass to individual
workers
+ hasretval - when True, func and the current function return a progress
+ iterator then a dict (encoded as an iterator that yields many (False, ..)
+ then a (True, dict)). The dicts are joined in some arbitrary order, so
+ overlapping keys are a bad idea.
+
threadsafe - whether work items are thread safe and can be executed using
a thread-based worker. Should be disabled for CPU heavy tasks that don't
release the GIL.
'''
enabled = ui.configbool('worker', 'enabled')
if enabled and worthwhile(ui, costperarg, len(args), threadsafe=threadsafe):
- return _platformworker(ui, func, staticargs, args)
+ return _platformworker(ui, func, staticargs, args, hasretval)
return func(*staticargs + (args,))
-def _posixworker(ui, func, staticargs, args):
+def _posixworker(ui, func, staticargs, args, hasretval):
workers = _numworkers(ui)
oldhandler = signal.getsignal(signal.SIGINT)
signal.signal(signal.SIGINT, signal.SIG_IGN)
@@ -157,6 +163,7 @@
ui.flush()
parentpid = os.getpid()
pipes = []
+ retval = {}
for pargs in partition(args, workers):
# Every worker gets its own pipe to send results on, so we don't have to
# implement atomic writes larger than PIPE_BUF. Each forked process has
@@ -219,7 +226,11 @@
while openpipes > 0:
for key, events in selector.select():
try:
- yield util.pickle.load(key.fileobj)
+ res = util.pickle.load(key.fileobj)
+ if hasretval and res[0]:
+ retval.update(res[1])
+ else:
+ yield res
except EOFError:
selector.unregister(key.fileobj)
key.fileobj.close()
@@ -237,6 +248,8 @@
if status < 0:
os.kill(os.getpid(), -status)
sys.exit(status)
+ if hasretval:
+ yield True, retval
def _posixexitstatus(code):
'''convert a posix exit status into the same form returned by
@@ -248,7 +261,7 @@
elif os.WIFSIGNALED(code):
return -os.WTERMSIG(code)
-def _windowsworker(ui, func, staticargs, args):
+def _windowsworker(ui, func, staticargs, args, hasretval):
class Worker(threading.Thread):
def __init__(self, taskqueue, resultqueue, func, staticargs, *args,
**kwargs):
@@ -305,6 +318,7 @@
workers = _numworkers(ui)
resultqueue = pycompat.queue.Queue()
taskqueue = pycompat.queue.Queue()
+ retval = {}
# partition work to more pieces than workers to minimize the chance
# of uneven distribution of large tasks between the workers
for pargs in partition(args, workers * 20):
@@ -316,7 +330,11 @@
try:
while len(threads) > 0:
while not resultqueue.empty():
- yield resultqueue.get()
+ res = resultqueue.get()
+ if hasretval and res[0]:
+ retval.update(res[1])
+ else:
+ yield res
threads[0].join(0.05)
finishedthreads = [_t for _t in threads if not _t.is_alive()]
for t in finishedthreads:
@@ -327,7 +345,13 @@
trykillworkers()
raise
while not resultqueue.empty():
- yield resultqueue.get()
+ res = resultqueue.get()
+ if hasretval and res[0]:
+ retval.update(res[1])
+ else:
+ yield res
+ if hasretval:
+ yield True, retval
if pycompat.iswindows:
_platformworker = _windowsworker
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/relnotes/next Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,90 @@
+== New Features ==
+
+ * New config `commands.commit.post-status` shows status after successful
+ commit.
+
+ * `hg root` now has templating support, including support for showing
+ where a repo share's source is. See `hg help -v root` for details.
+
+ * New `--force-close-branch` flag for `hg commit` to forcibly close
+ branch from a non-head changeset.
+
+ * The curses-based interface for commands like `hg commit -i` now supports
+ a range-select mechanism. Select the first line using space like before,
+ navigate to the last line, and press X (capital x) to set all items in
+ the range at once. Lowercase x has been added as a synonym for space to
+ help reinforce the mechanism, and pressing enter/return continues to be a
+ synonym for "toggle the current line and move down to the next item in
+ this section."
+
+== New Experimental Features ==
+
+ * New config `experimental.log.topo` makes `hg log -G` use
+ topological sorting. This is especially useful for aliases since it
+ lets the alias accept an `-r` option while still using topological
+ sorting with or without the `-r` (unlike if you use the `sort(...,
+ topo)` revset).
+
+
+== Bug Fixes ==
+
+ * issue4292: "hg log and {files} {file_adds} {file_mods} {file_dels}
+ in template show wrong files on merged revision". See details in
+ "Backwards Compatibility Changes".
+
+
+== Backwards Compatibility Changes ==
+
+ * Removed (experimental) support for log graph lines mixing
+ parent/grandparent styles. Setting
+ e.g. `experimental.graphstyle.parent = !` and
+ `experimental.graphstyle.grandparent = 3.` would use `!` for the
+ first three lines of the graph and then `.`. This is no longer
+ supported.
+
+ * If `ui.origbackuppath` had been (incorrectly) configured to point
+ to a file, we will now replace that file by a directory and put
+ backups in that directory. This is similar to how we would
+ previously replace files *in* the configured directory by
+ subdirectories.
+
+* Template keywords `{file_mods}`, `{file_adds}`, and `{file_dels}`
+ have changed behavior on merge commits. They used to be relative to
+ the first parent, but they now consider both parents. `{file_adds}`
+ shows files that exist in the commit but did not exist in either
+ parent. `{file_dels}` shows files that do not exist in the commit
+ but existed in either parent. `{file_mods}` shows the remaining
+ files from `{files}` that were not in the other two
+ sets.
+
+
+== Internal API Changes ==
+
+ * Matchers are no longer iterable. Use `match.files()` instead.
+
+ * `match.visitdir()` and `match.visitchildrenset()` now expect the
+ empty string instead of '.' to indicate the root directory.
+
+ * `util.dirs()` and `util.finddirs()` now include an entry for the
+ root directory (empty string).
+
+ * shelve is no longer an extension; it is now enabled by default.
+
+ * New API to manage unfinished operations: Earlier there were distinct APIs
+ which dealt with unfinished states and separate lists maintaining them
+ that are `cmdutil.afterresolvestates`, `cmdutil.unfinishedstates` and
+ `cmdutil.STATES`. Now these have been unified to a single
+ API which handles the various states and their utilities. This API
+ has been added to `state.py`. Now instead of adding to these 3 lists
+ independently a state for a new operation can be registered using
+ `addunfinished()` in `state` module.
+
+ * `cmdutil.checkunfinished()` now includes detection for merge too.
+
+ * merge abort has been disallowed in case an operation of higher
+ precedence is in progress to avoid cases of partial abort of
+ operations.
+
+ * We used to automatically attempt to make extensions compatible with
+ Python 3 (by translating their source code while loading it). We no
+ longer do that.
--- a/rust/Cargo.lock Tue Jul 09 10:07:35 2019 -0400
+++ b/rust/Cargo.lock Mon Jul 22 14:00:33 2019 -0400
@@ -1,9 +1,11 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
[[package]]
name = "aho-corasick"
version = "0.6.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "memchr 2.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -17,8 +19,8 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
-name = "cfg-if"
-version = "0.1.6"
+name = "byteorder"
+version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@@ -49,8 +51,12 @@
name = "hg-core"
version = "0.1.0"
dependencies = [
+ "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_pcg 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -74,7 +80,7 @@
[[package]]
name = "lazy_static"
-version = "1.2.0"
+version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@@ -84,13 +90,8 @@
[[package]]
name = "memchr"
-version = "2.1.2"
+version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)",
- "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
-]
[[package]]
name = "num-traits"
@@ -225,7 +226,7 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"aho-corasick 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "memchr 2.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-syntax 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
"thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"utf8-ranges 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -265,7 +266,7 @@
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -279,11 +280,6 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
-name = "version_check"
-version = "0.1.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
name = "winapi"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -306,13 +302,13 @@
"checksum aho-corasick 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)" = "1e9a933f4e58658d7b12defcf96dc5c720f20832deebe3e0a19efd3b6aaeeb9e"
"checksum autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a6d640bee2da49f60a4068a7fae53acde8982514ab7bae8b8cea9e88cbcfd799"
"checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12"
-"checksum cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "082bb9b28e00d3c9d39cc03e64ce4cea0f1bb9b3fde493f0cbc008472d22bdf4"
+"checksum byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a019b10a2a7cdeb292db131fc8113e57ea2a908f6e7894b0c3c671893b65dbeb"
"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
"checksum cpython 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b489034e723e7f5109fecd19b719e664f89ef925be785885252469e9822fa940"
"checksum fuchsia-cprng 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "81f7f8eb465745ea9b02e2704612a9946a59fa40572086c6fd49d6ddcf30bf31"
-"checksum lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a374c89b9db55895453a74c1e38861d9deec0b01b405a82516e9d5de4820dea1"
+"checksum lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14"
"checksum libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)" = "2d2857ec59fadc0773853c664d2d18e7198e83883e7060b63c924cb077bd5c74"
-"checksum memchr 2.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "db4c41318937f6e76648f42826b1d9ade5c09cafb5aef7e351240a70f39206e9"
+"checksum memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2efc7bc57c883d4a4d6e3246905283d8dae951bb3bd32f49d6ef297f546e1c39"
"checksum num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0b3a5d7cc97d6d30d8b9bc8fa19bf45349ffe46241e8816f50f62f6d6aaabee1"
"checksum python27-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "56114c37d4dca82526d74009df7782a28c871ac9d36b19d4cb9e67672258527e"
"checksum python3-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "61e4aac43f833fd637e429506cb2ac9d7df672c4b68f2eaaa163649b7fdc0444"
@@ -335,7 +331,6 @@
"checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b"
"checksum ucd-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "535c204ee4d8434478593480b8f86ab45ec9aae0e83c568ca81abf0fd0e88f86"
"checksum utf8-ranges 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "796f7e48bef87609f7ade7e06495a87d5cd06c7866e6a5cbfceffc558a243737"
-"checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd"
"checksum winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "92c1eb33641e276cfa214a0522acad57be5c56b10cb348b3c5117db75f3ac4b0"
"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
--- a/rust/hg-core/Cargo.toml Tue Jul 09 10:07:35 2019 -0400
+++ b/rust/hg-core/Cargo.toml Mon Jul 22 14:00:33 2019 -0400
@@ -3,6 +3,7 @@
version = "0.1.0"
authors = ["Georges Racinet <gracinet@anybox.fr>"]
description = "Mercurial pure Rust core library, with no assumption on Python bindings (FFI)"
+edition = "2018"
[lib]
name = "hg"
@@ -10,3 +11,9 @@
[dev-dependencies]
rand = "*"
rand_pcg = "*"
+
+[dependencies]
+byteorder = "1.3.1"
+lazy_static = "1.3.0"
+memchr = "2.2.0"
+regex = "^1.1"
--- a/rust/hg-core/src/ancestors.rs Tue Jul 09 10:07:35 2019 -0400
+++ b/rust/hg-core/src/ancestors.rs Mon Jul 22 14:00:33 2019 -0400
@@ -8,9 +8,9 @@
//! Rust versions of generic DAG ancestors algorithms for Mercurial
use super::{Graph, GraphError, Revision, NULL_REVISION};
+use crate::dagops;
use std::cmp::max;
use std::collections::{BinaryHeap, HashSet};
-use crate::dagops;
/// Iterator over the ancestors of a given list of revisions
/// This is a generic type, defined and implemented for any Graph, so that
--- a/rust/hg-core/src/dagops.rs Tue Jul 09 10:07:35 2019 -0400
+++ b/rust/hg-core/src/dagops.rs Mon Jul 22 14:00:33 2019 -0400
@@ -13,7 +13,8 @@
//! - Similarly *relative roots* of a collection of `Revision`, we mean
//! those whose parents, if any, don't belong to the collection.
use super::{Graph, GraphError, Revision, NULL_REVISION};
-use std::collections::HashSet;
+use crate::ancestors::AncestorsIterator;
+use std::collections::{BTreeSet, HashSet};
fn remove_parents(
graph: &impl Graph,
@@ -80,6 +81,92 @@
Ok(())
}
+/// Roots of `revs`, passed as a `HashSet`
+///
+/// They are returned in arbitrary order
+pub fn roots<G: Graph>(
+ graph: &G,
+ revs: &HashSet<Revision>,
+) -> Result<Vec<Revision>, GraphError> {
+ let mut roots: Vec<Revision> = Vec::new();
+ for rev in revs {
+ if graph
+ .parents(*rev)?
+ .iter()
+ .filter(|p| **p != NULL_REVISION)
+ .all(|p| !revs.contains(p))
+ {
+ roots.push(*rev);
+ }
+ }
+ Ok(roots)
+}
+
+/// Compute the topological range between two collections of revisions
+///
+/// This is equivalent to the revset `<roots>::<heads>`.
+///
+/// Currently, the given `Graph` has to implement `Clone`, which means
+/// actually cloning just a reference-counted Python pointer if
+/// it's passed over through `rust-cpython`. This is due to the internal
+/// use of `AncestorsIterator`
+///
+/// # Algorithmic details
+///
+/// This is a two-pass sweep inspired by what `reachableroots2` from
+/// `mercurial.cext.parsers` does to obtain the same results.
+///
+/// - first, we climb up the DAG from `heads` in topological order, keeping
+/// them in the `heads_ancestors` vector, and adding any element of
+/// `roots` we find among them to the resulting range.
+/// - Then, we iterate on that recorded vector so that a revision is always
+/// emitted after its parents and add all revisions whose parents are already
+/// in the range to the results.
+///
+/// # Performance notes
+///
+/// The main difference with the C implementation is that
+/// the latter uses a flat array with bit flags, instead of complex structures
+/// like `HashSet`, making it faster in most scenarios. In theory, it's
+/// possible that the present implementation could be more memory efficient
+/// for very large repositories with many branches.
+pub fn range(
+ graph: &(impl Graph + Clone),
+ roots: impl IntoIterator<Item = Revision>,
+ heads: impl IntoIterator<Item = Revision>,
+) -> Result<BTreeSet<Revision>, GraphError> {
+ let mut range = BTreeSet::new();
+ let roots: HashSet<Revision> = roots.into_iter().collect();
+ let min_root: Revision = match roots.iter().cloned().min() {
+ None => {
+ return Ok(range);
+ }
+ Some(r) => r,
+ };
+
+ // Internally, AncestorsIterator currently maintains a `HashSet`
+ // of all seen revisions, which is also what we record, albeit in an ordered
+ // way. There's room for improvement on this duplication.
+ let ait = AncestorsIterator::new(graph.clone(), heads, min_root, true)?;
+ let mut heads_ancestors: Vec<Revision> = Vec::new();
+ for revres in ait {
+ let rev = revres?;
+ if roots.contains(&rev) {
+ range.insert(rev);
+ }
+ heads_ancestors.push(rev);
+ }
+
+ for rev in heads_ancestors.into_iter().rev() {
+ for parent in graph.parents(rev)?.iter() {
+ if *parent != NULL_REVISION && range.contains(parent) {
+ range.insert(rev);
+ }
+ }
+ }
+ Ok(range)
+}
+
#[cfg(test)]
mod tests {
@@ -137,4 +224,53 @@
Ok(())
}
+ /// Apply `roots()` and sort the result for easier comparison
+ fn roots_sorted(
+ graph: &impl Graph,
+ revs: &[Revision],
+ ) -> Result<Vec<Revision>, GraphError> {
+ let mut as_vec = roots(graph, &revs.iter().cloned().collect())?;
+ as_vec.sort();
+ Ok(as_vec)
+ }
+
+ #[test]
+ fn test_roots() -> Result<(), GraphError> {
+ assert_eq!(roots_sorted(&SampleGraph, &[4, 5, 6])?, vec![4]);
+ assert_eq!(
+ roots_sorted(&SampleGraph, &[4, 1, 6, 12, 0])?,
+ vec![0, 4, 12]
+ );
+ assert_eq!(
+ roots_sorted(&SampleGraph, &[1, 2, 3, 4, 5, 6, 7, 8, 9])?,
+ vec![1, 8]
+ );
+ Ok(())
+ }
+
+ /// Apply `range()` and convert the result into a Vec for easier comparison
+ fn range_vec(
+ graph: impl Graph + Clone,
+ roots: &[Revision],
+ heads: &[Revision],
+ ) -> Result<Vec<Revision>, GraphError> {
+ range(&graph, roots.iter().cloned(), heads.iter().cloned())
+ .map(|bs| bs.into_iter().collect())
+ }
+
+ #[test]
+ fn test_range() -> Result<(), GraphError> {
+ assert_eq!(range_vec(SampleGraph, &[0], &[4])?, vec![0, 1, 2, 4]);
+ assert_eq!(range_vec(SampleGraph, &[0], &[8])?, vec![]);
+ assert_eq!(
+ range_vec(SampleGraph, &[5, 6], &[10, 11, 13])?,
+ vec![5, 10]
+ );
+ assert_eq!(
+ range_vec(SampleGraph, &[5, 6], &[10, 12])?,
+ vec![5, 6, 9, 10, 12]
+ );
+ Ok(())
+ }
+
}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/dirstate.rs Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,36 @@
+pub mod dirs_multiset;
+pub mod parsers;
+
+#[derive(Debug, PartialEq, Copy, Clone)]
+pub struct DirstateParents<'a> {
+ pub p1: &'a [u8],
+ pub p2: &'a [u8],
+}
+
+/// The C implementation uses all signed types. This will be an issue
+/// either when 4GB+ source files are commonplace or in 2038, whichever
+/// comes first.
+#[derive(Debug, PartialEq)]
+pub struct DirstateEntry {
+ pub state: i8,
+ pub mode: i32,
+ pub mtime: i32,
+ pub size: i32,
+}
+
+pub type DirstateVec = Vec<(Vec<u8>, DirstateEntry)>;
+
+#[derive(Debug, PartialEq)]
+pub struct CopyVecEntry<'a> {
+ pub path: &'a [u8],
+ pub copy_path: &'a [u8],
+}
+
+pub type CopyVec<'a> = Vec<CopyVecEntry<'a>>;
+
+/// The Python implementation passes either a mapping (dirstate) or a flat
+/// iterable (manifest)
+pub enum DirsIterable {
+ Dirstate(DirstateVec),
+ Manifest(Vec<Vec<u8>>),
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/dirstate/dirs_multiset.rs Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,328 @@
+// dirs_multiset.rs
+//
+// Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+//! A multiset of directory names.
+//!
+//! Used to count the references to directories in a manifest or dirstate.
+use crate::{utils::files, DirsIterable, DirstateEntry, DirstateMapError};
+use std::collections::hash_map::{Entry, Iter};
+use std::collections::HashMap;
+
+#[derive(PartialEq, Debug)]
+pub struct DirsMultiset {
+ inner: HashMap<Vec<u8>, u32>,
+}
+
+impl DirsMultiset {
+ /// Initializes the multiset from a dirstate or a manifest.
+ ///
+ /// If `skip_state` is provided, skips dirstate entries with equal state.
+ pub fn new(iterable: DirsIterable, skip_state: Option<i8>) -> Self {
+ let mut multiset = DirsMultiset {
+ inner: HashMap::new(),
+ };
+
+ match iterable {
+ DirsIterable::Dirstate(vec) => {
+ for (ref filename, DirstateEntry { state, .. }) in vec {
+ // This `if` is optimized out of the loop
+ if let Some(skip) = skip_state {
+ if skip != state {
+ multiset.add_path(filename);
+ }
+ } else {
+ multiset.add_path(filename);
+ }
+ }
+ }
+ DirsIterable::Manifest(vec) => {
+ for ref filename in vec {
+ multiset.add_path(filename);
+ }
+ }
+ }
+
+ multiset
+ }
+
+ /// Increases the count of deepest directory contained in the path.
+ ///
+ /// If the directory is not yet in the map, adds its parents.
+ pub fn add_path(&mut self, path: &[u8]) {
+ for subpath in files::find_dirs(path) {
+ if let Some(val) = self.inner.get_mut(subpath) {
+ *val += 1;
+ break;
+ }
+ self.inner.insert(subpath.to_owned(), 1);
+ }
+ }
+
+ /// Decreases the count of deepest directory contained in the path.
+ ///
+ /// If it is the only reference, decreases all parents until one is
+ /// removed.
+ /// If the directory is not in the map, something horrible has happened.
+ pub fn delete_path(
+ &mut self,
+ path: &[u8],
+ ) -> Result<(), DirstateMapError> {
+ for subpath in files::find_dirs(path) {
+ match self.inner.entry(subpath.to_owned()) {
+ Entry::Occupied(mut entry) => {
+ let val = entry.get().clone();
+ if val > 1 {
+ entry.insert(val - 1);
+ break;
+ }
+ entry.remove();
+ }
+ Entry::Vacant(_) => {
+ return Err(DirstateMapError::PathNotFound(
+ path.to_owned(),
+ ))
+ }
+ };
+ }
+
+ Ok(())
+ }
+
+ pub fn contains_key(&self, key: &[u8]) -> bool {
+ self.inner.contains_key(key)
+ }
+
+ pub fn iter(&self) -> Iter<Vec<u8>, u32> {
+ self.inner.iter()
+ }
+
+ pub fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_delete_path_path_not_found() {
+ let mut map = DirsMultiset::new(DirsIterable::Manifest(vec![]), None);
+ let path = b"doesnotexist/";
+ assert_eq!(
+ Err(DirstateMapError::PathNotFound(path.to_vec())),
+ map.delete_path(path)
+ );
+ }
+
+ #[test]
+ fn test_delete_path_empty_path() {
+ let mut map =
+ DirsMultiset::new(DirsIterable::Manifest(vec![vec![]]), None);
+ let path = b"";
+ assert_eq!(Ok(()), map.delete_path(path));
+ assert_eq!(
+ Err(DirstateMapError::PathNotFound(path.to_vec())),
+ map.delete_path(path)
+ );
+ }
+
+ #[test]
+ fn test_delete_path_successful() {
+ let mut map = DirsMultiset {
+ inner: [("", 5), ("a", 3), ("a/b", 2), ("a/c", 1)]
+ .iter()
+ .map(|(k, v)| (k.as_bytes().to_vec(), *v))
+ .collect(),
+ };
+
+ assert_eq!(Ok(()), map.delete_path(b"a/b/"));
+ assert_eq!(Ok(()), map.delete_path(b"a/b/"));
+ assert_eq!(
+ Err(DirstateMapError::PathNotFound(b"a/b/".to_vec())),
+ map.delete_path(b"a/b/")
+ );
+
+ assert_eq!(2, *map.inner.get(&b"a".to_vec()).unwrap());
+ assert_eq!(1, *map.inner.get(&b"a/c".to_vec()).unwrap());
+ eprintln!("{:?}", map);
+ assert_eq!(Ok(()), map.delete_path(b"a/"));
+ eprintln!("{:?}", map);
+
+ assert_eq!(Ok(()), map.delete_path(b"a/c/"));
+ assert_eq!(
+ Err(DirstateMapError::PathNotFound(b"a/c/".to_vec())),
+ map.delete_path(b"a/c/")
+ );
+ }
+
+ #[test]
+ fn test_add_path_empty_path() {
+ let mut map = DirsMultiset::new(DirsIterable::Manifest(vec![]), None);
+ let path = b"";
+ map.add_path(path);
+
+ assert_eq!(1, map.len());
+ }
+
+ #[test]
+ fn test_add_path_successful() {
+ let mut map = DirsMultiset::new(DirsIterable::Manifest(vec![]), None);
+
+ map.add_path(b"a/");
+ assert_eq!(1, *map.inner.get(&b"a".to_vec()).unwrap());
+ assert_eq!(1, *map.inner.get(&Vec::new()).unwrap());
+ assert_eq!(2, map.len());
+
+ // Non directory should be ignored
+ map.add_path(b"a");
+ assert_eq!(1, *map.inner.get(&b"a".to_vec()).unwrap());
+ assert_eq!(2, map.len());
+
+ // Non directory will still add its base
+ map.add_path(b"a/b");
+ assert_eq!(2, *map.inner.get(&b"a".to_vec()).unwrap());
+ assert_eq!(2, map.len());
+
+ // Duplicate path works
+ map.add_path(b"a/");
+ assert_eq!(3, *map.inner.get(&b"a".to_vec()).unwrap());
+
+ // Nested dir adds to its base
+ map.add_path(b"a/b/");
+ assert_eq!(4, *map.inner.get(&b"a".to_vec()).unwrap());
+ assert_eq!(1, *map.inner.get(&b"a/b".to_vec()).unwrap());
+
+ // but not its base's base, because it already existed
+ map.add_path(b"a/b/c/");
+ assert_eq!(4, *map.inner.get(&b"a".to_vec()).unwrap());
+ assert_eq!(2, *map.inner.get(&b"a/b".to_vec()).unwrap());
+
+ map.add_path(b"a/c/");
+ assert_eq!(1, *map.inner.get(&b"a/c".to_vec()).unwrap());
+
+ let expected = DirsMultiset {
+ inner: [("", 2), ("a", 5), ("a/b", 2), ("a/b/c", 1), ("a/c", 1)]
+ .iter()
+ .map(|(k, v)| (k.as_bytes().to_vec(), *v))
+ .collect(),
+ };
+ assert_eq!(map, expected);
+ }
+
+ #[test]
+ fn test_dirsmultiset_new_empty() {
+ use DirsIterable::{Dirstate, Manifest};
+
+ let new = DirsMultiset::new(Manifest(vec![]), None);
+ let expected = DirsMultiset {
+ inner: HashMap::new(),
+ };
+ assert_eq!(expected, new);
+
+ let new = DirsMultiset::new(Dirstate(vec![]), None);
+ let expected = DirsMultiset {
+ inner: HashMap::new(),
+ };
+ assert_eq!(expected, new);
+ }
+
+ #[test]
+ fn test_dirsmultiset_new_no_skip() {
+ use DirsIterable::{Dirstate, Manifest};
+
+ let input_vec = ["a/", "b/", "a/c", "a/d/"]
+ .iter()
+ .map(|e| e.as_bytes().to_vec())
+ .collect();
+ let expected_inner = [("", 2), ("a", 3), ("b", 1), ("a/d", 1)]
+ .iter()
+ .map(|(k, v)| (k.as_bytes().to_vec(), *v))
+ .collect();
+
+ let new = DirsMultiset::new(Manifest(input_vec), None);
+ let expected = DirsMultiset {
+ inner: expected_inner,
+ };
+ assert_eq!(expected, new);
+
+ let input_map = ["a/", "b/", "a/c", "a/d/"]
+ .iter()
+ .map(|f| {
+ (
+ f.as_bytes().to_vec(),
+ DirstateEntry {
+ state: 0,
+ mode: 0,
+ mtime: 0,
+ size: 0,
+ },
+ )
+ })
+ .collect();
+ let expected_inner = [("", 2), ("a", 3), ("b", 1), ("a/d", 1)]
+ .iter()
+ .map(|(k, v)| (k.as_bytes().to_vec(), *v))
+ .collect();
+
+ let new = DirsMultiset::new(Dirstate(input_map), None);
+ let expected = DirsMultiset {
+ inner: expected_inner,
+ };
+ assert_eq!(expected, new);
+ }
+
+ #[test]
+ fn test_dirsmultiset_new_skip() {
+ use DirsIterable::{Dirstate, Manifest};
+
+ let input_vec = ["a/", "b/", "a/c", "a/d/"]
+ .iter()
+ .map(|e| e.as_bytes().to_vec())
+ .collect();
+ let expected_inner = [("", 2), ("a", 3), ("b", 1), ("a/d", 1)]
+ .iter()
+ .map(|(k, v)| (k.as_bytes().to_vec(), *v))
+ .collect();
+
+ let new = DirsMultiset::new(Manifest(input_vec), Some('n' as i8));
+ let expected = DirsMultiset {
+ inner: expected_inner,
+ };
+ // Skip does not affect a manifest
+ assert_eq!(expected, new);
+
+ let input_map =
+ [("a/", 'n'), ("a/b/", 'n'), ("a/c", 'r'), ("a/d/", 'm')]
+ .iter()
+ .map(|(f, state)| {
+ (
+ f.as_bytes().to_vec(),
+ DirstateEntry {
+ state: *state as i8,
+ mode: 0,
+ mtime: 0,
+ size: 0,
+ },
+ )
+ })
+ .collect();
+
+ // "a" incremented with "a/c" and "a/d/"
+ let expected_inner = [("", 1), ("a", 2), ("a/d", 1)]
+ .iter()
+ .map(|(k, v)| (k.as_bytes().to_vec(), *v))
+ .collect();
+
+ let new = DirsMultiset::new(Dirstate(input_map), Some('n' as i8));
+ let expected = DirsMultiset {
+ inner: expected_inner,
+ };
+ assert_eq!(expected, new);
+ }
+
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/dirstate/parsers.rs Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,388 @@
+// Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+use crate::{
+ CopyVec, CopyVecEntry, DirstateEntry, DirstatePackError, DirstateParents,
+ DirstateParseError, DirstateVec,
+};
+use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
+use std::collections::HashMap;
+use std::io::Cursor;
+
+/// Parents are stored in the dirstate as byte hashes.
+const PARENT_SIZE: usize = 20;
+/// Dirstate entries have a static part of 8 + 32 + 32 + 32 + 32 bits.
+const MIN_ENTRY_SIZE: usize = 17;
+
+pub fn parse_dirstate(
+ contents: &[u8],
+) -> Result<(DirstateParents, DirstateVec, CopyVec), DirstateParseError> {
+ if contents.len() < PARENT_SIZE * 2 {
+ return Err(DirstateParseError::TooLittleData);
+ }
+
+ let mut dirstate_vec = vec![];
+ let mut copies = vec![];
+ let mut curr_pos = PARENT_SIZE * 2;
+ let parents = DirstateParents {
+ p1: &contents[..PARENT_SIZE],
+ p2: &contents[PARENT_SIZE..curr_pos],
+ };
+
+ while curr_pos < contents.len() {
+ if curr_pos + MIN_ENTRY_SIZE > contents.len() {
+ return Err(DirstateParseError::Overflow);
+ }
+ let entry_bytes = &contents[curr_pos..];
+
+ let mut cursor = Cursor::new(entry_bytes);
+ let state = cursor.read_i8()?;
+ let mode = cursor.read_i32::<BigEndian>()?;
+ let size = cursor.read_i32::<BigEndian>()?;
+ let mtime = cursor.read_i32::<BigEndian>()?;
+ let path_len = cursor.read_i32::<BigEndian>()? as usize;
+
+ if path_len > contents.len() - curr_pos {
+ return Err(DirstateParseError::Overflow);
+ }
+
+ // Use a slice here rather than the allocated Vec that `read_exact` would need
+ let path = &entry_bytes[MIN_ENTRY_SIZE..MIN_ENTRY_SIZE + (path_len)];
+
+ let (path, copy) = match memchr::memchr(0, path) {
+ None => (path, None),
+ Some(i) => (&path[..i], Some(&path[(i + 1)..])),
+ };
+
+ if let Some(copy_path) = copy {
+ copies.push(CopyVecEntry { path, copy_path });
+ };
+ dirstate_vec.push((
+ path.to_owned(),
+ DirstateEntry {
+ state,
+ mode,
+ size,
+ mtime,
+ },
+ ));
+ curr_pos = curr_pos + MIN_ENTRY_SIZE + (path_len);
+ }
+
+ Ok((parents, dirstate_vec, copies))
+}
+
+pub fn pack_dirstate(
+ dirstate_vec: &DirstateVec,
+ copymap: &HashMap<Vec<u8>, Vec<u8>>,
+ parents: DirstateParents,
+ now: i32,
+) -> Result<(Vec<u8>, DirstateVec), DirstatePackError> {
+ if parents.p1.len() != PARENT_SIZE || parents.p2.len() != PARENT_SIZE {
+ return Err(DirstatePackError::CorruptedParent);
+ }
+
+ let expected_size: usize = dirstate_vec
+ .iter()
+ .map(|(ref filename, _)| {
+ let mut length = MIN_ENTRY_SIZE + filename.len();
+ if let Some(ref copy) = copymap.get(filename) {
+ length += copy.len() + 1;
+ }
+ length
+ })
+ .sum();
+ let expected_size = expected_size + PARENT_SIZE * 2;
+
+ let mut packed = Vec::with_capacity(expected_size);
+ let mut new_dirstate_vec = vec![];
+
+ packed.extend(parents.p1);
+ packed.extend(parents.p2);
+
+ for (ref filename, entry) in dirstate_vec {
+ let mut new_filename: Vec<u8> = filename.to_owned();
+ let mut new_mtime: i32 = entry.mtime;
+ if entry.state == 'n' as i8 && entry.mtime == now.into() {
+ // The file was last modified "simultaneously" with the current
+ // write to dirstate (i.e. within the same second for file-
+ // systems with a granularity of 1 sec). This commonly happens
+ // for at least a couple of files on 'update'.
+ // The user could change the file without changing its size
+ // within the same second. Invalidate the file's mtime in
+ // dirstate, forcing future 'status' calls to compare the
+ // contents of the file if the size is the same. This prevents
+ // mistakenly treating such files as clean.
+ new_mtime = -1;
+ new_dirstate_vec.push((
+ filename.to_owned(),
+ DirstateEntry {
+ mtime: new_mtime,
+ ..*entry
+ },
+ ));
+ }
+
+ if let Some(copy) = copymap.get(filename) {
+ new_filename.push('\0' as u8);
+ new_filename.extend(copy);
+ }
+
+ packed.write_i8(entry.state)?;
+ packed.write_i32::<BigEndian>(entry.mode)?;
+ packed.write_i32::<BigEndian>(entry.size)?;
+ packed.write_i32::<BigEndian>(new_mtime)?;
+ packed.write_i32::<BigEndian>(new_filename.len() as i32)?;
+ packed.extend(new_filename)
+ }
+
+ if packed.len() != expected_size {
+ return Err(DirstatePackError::BadSize(expected_size, packed.len()));
+ }
+
+ Ok((packed, new_dirstate_vec))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_pack_dirstate_empty() {
+ let dirstate_vec: DirstateVec = vec![];
+ let copymap = HashMap::new();
+ let parents = DirstateParents {
+ p1: b"12345678910111213141",
+ p2: b"00000000000000000000",
+ };
+ let now: i32 = 15000000;
+ let expected =
+ (b"1234567891011121314100000000000000000000".to_vec(), vec![]);
+
+ assert_eq!(
+ expected,
+ pack_dirstate(&dirstate_vec, ©map, parents, now).unwrap()
+ );
+ }
+ #[test]
+ fn test_pack_dirstate_one_entry() {
+ let dirstate_vec: DirstateVec = vec![(
+ vec!['f' as u8, '1' as u8],
+ DirstateEntry {
+ state: 'n' as i8,
+ mode: 0o644,
+ size: 0,
+ mtime: 791231220,
+ },
+ )];
+ let copymap = HashMap::new();
+ let parents = DirstateParents {
+ p1: b"12345678910111213141",
+ p2: b"00000000000000000000",
+ };
+ let now: i32 = 15000000;
+ let expected = (
+ [
+ 49, 50, 51, 52, 53, 54, 55, 56, 57, 49, 48, 49, 49, 49, 50,
+ 49, 51, 49, 52, 49, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48,
+ 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 110, 0, 0, 1, 164, 0,
+ 0, 0, 0, 47, 41, 58, 244, 0, 0, 0, 2, 102, 49,
+ ]
+ .to_vec(),
+ vec![],
+ );
+
+ assert_eq!(
+ expected,
+ pack_dirstate(&dirstate_vec, ©map, parents, now).unwrap()
+ );
+ }
+ #[test]
+ fn test_pack_dirstate_one_entry_with_copy() {
+ let dirstate_vec: DirstateVec = vec![(
+ b"f1".to_vec(),
+ DirstateEntry {
+ state: 'n' as i8,
+ mode: 0o644,
+ size: 0,
+ mtime: 791231220,
+ },
+ )];
+ let mut copymap = HashMap::new();
+ copymap.insert(b"f1".to_vec(), b"copyname".to_vec());
+ let parents = DirstateParents {
+ p1: b"12345678910111213141",
+ p2: b"00000000000000000000",
+ };
+ let now: i32 = 15000000;
+ let expected = (
+ [
+ 49, 50, 51, 52, 53, 54, 55, 56, 57, 49, 48, 49, 49, 49, 50,
+ 49, 51, 49, 52, 49, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48,
+ 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 110, 0, 0, 1, 164, 0,
+ 0, 0, 0, 47, 41, 58, 244, 0, 0, 0, 11, 102, 49, 0, 99, 111,
+ 112, 121, 110, 97, 109, 101,
+ ]
+ .to_vec(),
+ vec![],
+ );
+
+ assert_eq!(
+ expected,
+ pack_dirstate(&dirstate_vec, ©map, parents, now).unwrap()
+ );
+ }
+
+ #[test]
+ fn test_parse_pack_one_entry_with_copy() {
+ let dirstate_vec: DirstateVec = vec![(
+ b"f1".to_vec(),
+ DirstateEntry {
+ state: 'n' as i8,
+ mode: 0o644,
+ size: 0,
+ mtime: 791231220,
+ },
+ )];
+ let mut copymap = HashMap::new();
+ copymap.insert(b"f1".to_vec(), b"copyname".to_vec());
+ let parents = DirstateParents {
+ p1: b"12345678910111213141",
+ p2: b"00000000000000000000",
+ };
+ let now: i32 = 15000000;
+ let result =
+ pack_dirstate(&dirstate_vec, ©map, parents, now).unwrap();
+
+ assert_eq!(
+ (
+ parents,
+ dirstate_vec,
+ copymap
+ .iter()
+ .map(|(k, v)| CopyVecEntry {
+ path: k.as_slice(),
+ copy_path: v.as_slice()
+ })
+ .collect()
+ ),
+ parse_dirstate(result.0.as_slice()).unwrap()
+ )
+ }
+
+ #[test]
+ fn test_parse_pack_multiple_entries_with_copy() {
+ let dirstate_vec: DirstateVec = vec![
+ (
+ b"f1".to_vec(),
+ DirstateEntry {
+ state: 'n' as i8,
+ mode: 0o644,
+ size: 0,
+ mtime: 791231220,
+ },
+ ),
+ (
+ b"f2".to_vec(),
+ DirstateEntry {
+ state: 'm' as i8,
+ mode: 0o777,
+ size: 1000,
+ mtime: 791231220,
+ },
+ ),
+ (
+ b"f3".to_vec(),
+ DirstateEntry {
+ state: 'r' as i8,
+ mode: 0o644,
+ size: 234553,
+ mtime: 791231220,
+ },
+ ),
+ (
+ b"f4\xF6".to_vec(),
+ DirstateEntry {
+ state: 'a' as i8,
+ mode: 0o644,
+ size: -1,
+ mtime: -1,
+ },
+ ),
+ ];
+ let mut copymap = HashMap::new();
+ copymap.insert(b"f1".to_vec(), b"copyname".to_vec());
+ copymap.insert(b"f4\xF6".to_vec(), b"copyname2".to_vec());
+ let parents = DirstateParents {
+ p1: b"12345678910111213141",
+ p2: b"00000000000000000000",
+ };
+ let now: i32 = 15000000;
+ let result =
+ pack_dirstate(&dirstate_vec, ©map, parents, now).unwrap();
+
+ assert_eq!(
+ (parents, dirstate_vec, copymap),
+ parse_dirstate(result.0.as_slice())
+ .and_then(|(p, dvec, cvec)| Ok((
+ p,
+ dvec,
+ cvec.iter()
+ .map(|entry| (
+ entry.path.to_vec(),
+ entry.copy_path.to_vec()
+ ))
+ .collect()
+ )))
+ .unwrap()
+ )
+ }
+
+ #[test]
+ /// https://www.mercurial-scm.org/repo/hg/rev/af3f26b6bba4
+ fn test_parse_pack_one_entry_with_copy_and_time_conflict() {
+ let dirstate_vec: DirstateVec = vec![(
+ b"f1".to_vec(),
+ DirstateEntry {
+ state: 'n' as i8,
+ mode: 0o644,
+ size: 0,
+ mtime: 15000000,
+ },
+ )];
+ let mut copymap = HashMap::new();
+ copymap.insert(b"f1".to_vec(), b"copyname".to_vec());
+ let parents = DirstateParents {
+ p1: b"12345678910111213141",
+ p2: b"00000000000000000000",
+ };
+ let now: i32 = 15000000;
+ let result =
+ pack_dirstate(&dirstate_vec, ©map, parents, now).unwrap();
+
+ assert_eq!(
+ (
+ parents,
+ vec![(
+ b"f1".to_vec(),
+ DirstateEntry {
+ state: 'n' as i8,
+ mode: 0o644,
+ size: 0,
+ mtime: -1
+ }
+ )],
+ copymap
+ .iter()
+ .map(|(k, v)| CopyVecEntry {
+ path: k.as_slice(),
+ copy_path: v.as_slice()
+ })
+ .collect()
+ ),
+ parse_dirstate(result.0.as_slice()).unwrap()
+ )
+ }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/discovery.rs Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,209 @@
+// discovery.rs
+//
+// Copyright 2019 Georges Racinet <georges.racinet@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+//! Discovery operations
+//!
+//! This is a Rust counterpart to the `partialdiscovery` class of
+//! `mercurial.setdiscovery`
+
+use super::{Graph, GraphError, Revision};
+use crate::ancestors::MissingAncestors;
+use crate::dagops;
+use std::collections::HashSet;
+
+pub struct PartialDiscovery<G: Graph + Clone> {
+ target_heads: Option<Vec<Revision>>,
+ graph: G, // plays the role of self._repo
+ common: MissingAncestors<G>,
+ undecided: Option<HashSet<Revision>>,
+ missing: HashSet<Revision>,
+}
+
+pub struct DiscoveryStats {
+ pub undecided: Option<usize>,
+}
+
+impl<G: Graph + Clone> PartialDiscovery<G> {
+ /// Create a PartialDiscovery object, with the intent
+ /// of comparing our `::<target_heads>` revset to the contents of another
+ /// repo.
+ ///
+ /// For now `target_heads` is passed as a vector, and will be used
+ /// at the first call to `ensure_undecided()`.
+ ///
+ /// If we want to make the signature more flexible,
+ /// we'll have to make it a type argument of `PartialDiscovery` or a trait
+ /// object, since we'll be keeping it around in the meantime
+ pub fn new(graph: G, target_heads: Vec<Revision>) -> Self {
+ PartialDiscovery {
+ undecided: None,
+ target_heads: Some(target_heads),
+ graph: graph.clone(),
+ common: MissingAncestors::new(graph, vec![]),
+ missing: HashSet::new(),
+ }
+ }
+
+ /// Register revisions known as being common
+ pub fn add_common_revisions(
+ &mut self,
+ common: impl IntoIterator<Item = Revision>,
+ ) -> Result<(), GraphError> {
+ self.common.add_bases(common);
+ if let Some(ref mut undecided) = self.undecided {
+ self.common.remove_ancestors_from(undecided)?;
+ }
+ Ok(())
+ }
+
+ /// Register revisions known as being missing
+ pub fn add_missing_revisions(
+ &mut self,
+ missing: impl IntoIterator<Item = Revision>,
+ ) -> Result<(), GraphError> {
+ self.ensure_undecided()?;
+ let range = dagops::range(
+ &self.graph,
+ missing,
+ self.undecided.as_ref().unwrap().iter().cloned(),
+ )?;
+ let undecided_mut = self.undecided.as_mut().unwrap();
+ for missrev in range {
+ self.missing.insert(missrev);
+ undecided_mut.remove(&missrev);
+ }
+ Ok(())
+ }
+
+ /// Do we have any information about the peer?
+ pub fn has_info(&self) -> bool {
+ self.common.has_bases()
+ }
+
+ /// Did we acquire full knowledge of our Revisions that the peer has?
+ pub fn is_complete(&self) -> bool {
+ self.undecided.as_ref().map_or(false, |s| s.is_empty())
+ }
+
+ /// Return the heads of the currently known common set of revisions.
+ ///
+ /// If the discovery process is not complete (see `is_complete()`), the
+ /// caller must be aware that this is an intermediate state.
+ ///
+ /// On the other hand, if it is complete, then this is currently
+ /// the only way to retrieve the end results of the discovery process.
+ ///
+ /// We may introduce in the future an `into_common_heads` call that
+ /// would be more appropriate for normal Rust callers, dropping `self`
+ /// if it is complete.
+ pub fn common_heads(&self) -> Result<HashSet<Revision>, GraphError> {
+ self.common.bases_heads()
+ }
+
+ /// Force first computation of `self.undecided`
+ ///
+ /// After this, `self.undecided.as_ref()` and `.as_mut()` can be
+ /// unwrapped to get workable immutable or mutable references without
+ /// any panic.
+ ///
+ /// This is an imperative call instead of a lazily-computed accessor
+ /// so that the caller can easily limit the scope of the mutable borrow,
+ /// compared to an undecided(&'a mut self) -> &'a… that would keep it
+ /// as long as the resulting immutable one.
+ fn ensure_undecided(&mut self) -> Result<(), GraphError> {
+ if self.undecided.is_some() {
+ return Ok(());
+ }
+ let tgt = self.target_heads.take().unwrap();
+ self.undecided =
+ Some(self.common.missing_ancestors(tgt)?.into_iter().collect());
+ Ok(())
+ }
+
+ /// Provide statistics about the current state of the discovery process
+ pub fn stats(&self) -> DiscoveryStats {
+ DiscoveryStats {
+ undecided: self.undecided.as_ref().map(|s| s.len()),
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::testing::SampleGraph;
+
+ /// A PartialDiscovery as for pushing all the heads of `SampleGraph`
+ fn full_disco() -> PartialDiscovery<SampleGraph> {
+ PartialDiscovery::new(SampleGraph, vec![10, 11, 12, 13])
+ }
+
+ fn sorted_undecided(
+ disco: &PartialDiscovery<SampleGraph>,
+ ) -> Vec<Revision> {
+ let mut as_vec: Vec<Revision> =
+ disco.undecided.as_ref().unwrap().iter().cloned().collect();
+ as_vec.sort();
+ as_vec
+ }
+
+ fn sorted_missing(disco: &PartialDiscovery<SampleGraph>) -> Vec<Revision> {
+ let mut as_vec: Vec<Revision> =
+ disco.missing.iter().cloned().collect();
+ as_vec.sort();
+ as_vec
+ }
+
+ fn sorted_common_heads(
+ disco: &PartialDiscovery<SampleGraph>,
+ ) -> Result<Vec<Revision>, GraphError> {
+ let mut as_vec: Vec<Revision> =
+ disco.common_heads()?.iter().cloned().collect();
+ as_vec.sort();
+ Ok(as_vec)
+ }
+
+ #[test]
+ fn test_add_common_get_undecided() -> Result<(), GraphError> {
+ let mut disco = full_disco();
+ assert_eq!(disco.undecided, None);
+ assert!(!disco.has_info());
+ assert_eq!(disco.stats().undecided, None);
+
+ disco.add_common_revisions(vec![11, 12])?;
+ assert!(disco.has_info());
+ assert!(!disco.is_complete());
+ assert!(disco.missing.is_empty());
+
+ // add_common_revisions did not trigger a premature computation
+ // of `undecided`, let's check that and ask for them
+ assert_eq!(disco.undecided, None);
+ disco.ensure_undecided()?;
+ assert_eq!(sorted_undecided(&disco), vec![5, 8, 10, 13]);
+ assert_eq!(disco.stats().undecided, Some(4));
+ Ok(())
+ }
+
+ /// in this test, we pretend that our peer misses exactly (8+10)::
+ /// and we're comparing all our repo to it (as in a bare push)
+ #[test]
+ fn test_discovery() -> Result<(), GraphError> {
+ let mut disco = full_disco();
+ disco.add_common_revisions(vec![11, 12])?;
+ disco.add_missing_revisions(vec![8, 10])?;
+ assert_eq!(sorted_undecided(&disco), vec![5]);
+ assert_eq!(sorted_missing(&disco), vec![8, 10, 13]);
+ assert!(!disco.is_complete());
+
+ disco.add_common_revisions(vec![5])?;
+ assert_eq!(sorted_undecided(&disco), vec![]);
+ assert_eq!(sorted_missing(&disco), vec![8, 10, 13]);
+ assert!(disco.is_complete());
+ assert_eq!(sorted_common_heads(&disco)?, vec![5, 11, 12]);
+ Ok(())
+ }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/filepatterns.rs Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,373 @@
+use crate::{
+ utils::{files::get_path_from_bytes, SliceExt},
+ LineNumber, PatternError, PatternFileError,
+};
+use lazy_static::lazy_static;
+use regex::bytes::{NoExpand, Regex};
+use std::collections::HashMap;
+use std::fs::File;
+use std::io::Read;
+use std::vec::Vec;
+
+lazy_static! {
+ static ref RE_ESCAPE: Vec<Vec<u8>> = {
+ let mut v: Vec<Vec<u8>> = (0..=255).map(|byte| vec![byte]).collect();
+ let to_escape = b"()[]{}?*+-|^$\\.&~# \t\n\r\x0b\x0c";
+ for byte in to_escape {
+ v[*byte as usize].insert(0, b'\\');
+ }
+ v
+ };
+}
+
+/// These are matched in order
+const GLOB_REPLACEMENTS: &[(&[u8], &[u8])] =
+ &[(b"*/", b"(?:.*/)?"), (b"*", b".*"), (b"", b"[^/]*")];
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum PatternSyntax {
+ Regexp,
+ /// Glob that matches at the front of the path
+ RootGlob,
+ /// Glob that matches at any suffix of the path (still anchored at slashes)
+ Glob,
+ Path,
+ RelPath,
+ RelGlob,
+ RelRegexp,
+ RootFiles,
+}
+
+/// Transforms a glob pattern into a regex
+fn glob_to_re(pat: &[u8]) -> Vec<u8> {
+ let mut input = pat;
+ let mut res: Vec<u8> = vec![];
+ let mut group_depth = 0;
+
+ while let Some((c, rest)) = input.split_first() {
+ input = rest;
+
+ match c {
+ b'*' => {
+ for (source, repl) in GLOB_REPLACEMENTS {
+ if input.starts_with(source) {
+ input = &input[source.len()..];
+ res.extend(*repl);
+ break;
+ }
+ }
+ }
+ b'?' => res.extend(b"."),
+ b'[' => {
+ match input.iter().skip(1).position(|b| *b == b']') {
+ None => res.extend(b"\\["),
+ Some(end) => {
+ // Account for the one we skipped
+ let end = end + 1;
+
+ res.extend(b"[");
+
+ for (i, b) in input[..end].iter().enumerate() {
+ if *b == b'!' && i == 0 {
+ res.extend(b"^")
+ } else if *b == b'^' && i == 0 {
+ res.extend(b"\\^")
+ } else if *b == b'\\' {
+ res.extend(b"\\\\")
+ } else {
+ res.push(*b)
+ }
+ }
+ res.extend(b"]");
+ input = &input[end + 1..];
+ }
+ }
+ }
+ b'{' => {
+ group_depth += 1;
+ res.extend(b"(?:")
+ }
+ b'}' if group_depth > 0 => {
+ group_depth -= 1;
+ res.extend(b")");
+ }
+ b',' if group_depth > 0 => res.extend(b"|"),
+ b'\\' => {
+ let c = {
+ if let Some((c, rest)) = input.split_first() {
+ input = rest;
+ c
+ } else {
+ c
+ }
+ };
+ res.extend(&RE_ESCAPE[*c as usize])
+ }
+ _ => res.extend(&RE_ESCAPE[*c as usize]),
+ }
+ }
+ res
+}
+
+fn escape_pattern(pattern: &[u8]) -> Vec<u8> {
+ pattern
+ .iter()
+ .flat_map(|c| RE_ESCAPE[*c as usize].clone())
+ .collect()
+}
+
+fn parse_pattern_syntax(kind: &[u8]) -> Result<PatternSyntax, PatternError> {
+ match kind {
+ b"re" => Ok(PatternSyntax::Regexp),
+ b"path" => Ok(PatternSyntax::Path),
+ b"relpath" => Ok(PatternSyntax::RelPath),
+ b"rootfilesin" => Ok(PatternSyntax::RootFiles),
+ b"relglob" => Ok(PatternSyntax::RelGlob),
+ b"relre" => Ok(PatternSyntax::RelRegexp),
+ b"glob" => Ok(PatternSyntax::Glob),
+ b"rootglob" => Ok(PatternSyntax::RootGlob),
+ _ => Err(PatternError::UnsupportedSyntax(
+ String::from_utf8_lossy(kind).to_string(),
+ )),
+ }
+}
+
+/// Builds the regex that corresponds to the given pattern.
+/// If within a `syntax: regexp` context, returns the pattern,
+/// otherwise, returns the corresponding regex.
+fn _build_single_regex(
+ syntax: PatternSyntax,
+ pattern: &[u8],
+ globsuffix: &[u8],
+) -> Vec<u8> {
+ if pattern.is_empty() {
+ return vec![];
+ }
+ match syntax {
+ PatternSyntax::Regexp => pattern.to_owned(),
+ PatternSyntax::RelRegexp => {
+ if pattern[0] == b'^' {
+ return pattern.to_owned();
+ }
+ let mut res = b".*".to_vec();
+ res.extend(pattern);
+ res
+ }
+ PatternSyntax::Path | PatternSyntax::RelPath => {
+ if pattern == b"." {
+ return vec![];
+ }
+ let mut pattern = escape_pattern(pattern);
+ pattern.extend(b"(?:/|$)");
+ pattern
+ }
+ PatternSyntax::RootFiles => {
+ let mut res = if pattern == b"." {
+ vec![]
+ } else {
+ // Pattern is a directory name.
+ let mut as_vec: Vec<u8> = escape_pattern(pattern);
+ as_vec.push(b'/');
+ as_vec
+ };
+
+ // Anything after the pattern must be a non-directory.
+ res.extend(b"[^/]+$");
+ res
+ }
+ PatternSyntax::Glob
+ | PatternSyntax::RelGlob
+ | PatternSyntax::RootGlob => {
+ let mut res: Vec<u8> = vec![];
+ if syntax == PatternSyntax::RelGlob {
+ res.extend(b"(?:|.*/)");
+ }
+
+ res.extend(glob_to_re(pattern));
+ res.extend(globsuffix.iter());
+ res
+ }
+ }
+}
+
+const GLOB_SPECIAL_CHARACTERS: [u8; 7] =
+ [b'*', b'?', b'[', b']', b'{', b'}', b'\\'];
+
+/// Wrapper function to `_build_single_regex` that short-circuits 'exact' globs
+/// that don't need to be transformed into a regex.
+pub fn build_single_regex(
+ kind: &[u8],
+ pat: &[u8],
+ globsuffix: &[u8],
+) -> Result<Vec<u8>, PatternError> {
+ let enum_kind = parse_pattern_syntax(kind)?;
+ if enum_kind == PatternSyntax::RootGlob
+ && !pat.iter().any(|b| GLOB_SPECIAL_CHARACTERS.contains(b))
+ {
+ let mut escaped = escape_pattern(pat);
+ escaped.extend(b"(?:/|$)");
+ Ok(escaped)
+ } else {
+ Ok(_build_single_regex(enum_kind, pat, globsuffix))
+ }
+}
+
+lazy_static! {
+ static ref SYNTAXES: HashMap<&'static [u8], &'static [u8]> = {
+ let mut m = HashMap::new();
+
+ m.insert(b"re".as_ref(), b"relre:".as_ref());
+ m.insert(b"regexp".as_ref(), b"relre:".as_ref());
+ m.insert(b"glob".as_ref(), b"relglob:".as_ref());
+ m.insert(b"rootglob".as_ref(), b"rootglob:".as_ref());
+ m.insert(b"include".as_ref(), b"include".as_ref());
+ m.insert(b"subinclude".as_ref(), b"subinclude".as_ref());
+ m
+ };
+}
+
+pub type PatternTuple = (Vec<u8>, LineNumber, Vec<u8>);
+type WarningTuple = (Vec<u8>, Vec<u8>);
+
+pub fn parse_pattern_file_contents(
+ lines: &[u8],
+ file_path: &[u8],
+ warn: bool,
+) -> (Vec<PatternTuple>, Vec<WarningTuple>) {
+ let comment_regex = Regex::new(r"((?:^|[^\\])(?:\\\\)*)#.*").unwrap();
+ let comment_escape_regex = Regex::new(r"\\#").unwrap();
+ let mut inputs: Vec<PatternTuple> = vec![];
+ let mut warnings: Vec<WarningTuple> = vec![];
+
+ let mut current_syntax = b"relre:".as_ref();
+
+ for (line_number, mut line) in lines.split(|c| *c == b'\n').enumerate() {
+ let line_number = line_number + 1;
+
+ let line_buf;
+ if line.contains(&b'#') {
+ if let Some(cap) = comment_regex.captures(line) {
+ line = &line[..cap.get(1).unwrap().end()]
+ }
+ line_buf = comment_escape_regex.replace_all(line, NoExpand(b"#"));
+ line = &line_buf;
+ }
+
+ let mut line = line.trim_end();
+
+ if line.is_empty() {
+ continue;
+ }
+
+ if line.starts_with(b"syntax:") {
+ let syntax = line[b"syntax:".len()..].trim();
+
+ if let Some(rel_syntax) = SYNTAXES.get(syntax) {
+ current_syntax = rel_syntax;
+ } else if warn {
+ warnings.push((file_path.to_owned(), syntax.to_owned()));
+ }
+ continue;
+ }
+
+ let mut line_syntax: &[u8] = ¤t_syntax;
+
+ for (s, rels) in SYNTAXES.iter() {
+ if line.starts_with(rels) {
+ line_syntax = rels;
+ line = &line[rels.len()..];
+ break;
+ } else if line.starts_with(&[s, b":".as_ref()].concat()) {
+ line_syntax = rels;
+ line = &line[s.len() + 1..];
+ break;
+ }
+ }
+
+ inputs.push((
+ [line_syntax, line].concat(),
+ line_number,
+ line.to_owned(),
+ ));
+ }
+ (inputs, warnings)
+}
+
+pub fn read_pattern_file(
+ file_path: &[u8],
+ warn: bool,
+) -> Result<(Vec<PatternTuple>, Vec<WarningTuple>), PatternFileError> {
+ let mut f = File::open(get_path_from_bytes(file_path))?;
+ let mut contents = Vec::new();
+
+ f.read_to_end(&mut contents)?;
+
+ Ok(parse_pattern_file_contents(&contents, file_path, warn))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn escape_pattern_test() {
+ let untouched = br#"!"%',/0123456789:;<=>@ABCDEFGHIJKLMNOPQRSTUVWXYZ_`abcdefghijklmnopqrstuvwxyz"#;
+ assert_eq!(escape_pattern(untouched), untouched.to_vec());
+ // All escape codes
+ assert_eq!(
+ escape_pattern(br#"()[]{}?*+-|^$\\.&~# \t\n\r\v\f"#),
+ br#"\(\)\[\]\{\}\?\*\+\-\|\^\$\\\\\.\&\~\#\ \\t\\n\\r\\v\\f"#
+ .to_vec()
+ );
+ }
+
+ #[test]
+ fn glob_test() {
+ assert_eq!(glob_to_re(br#"?"#), br#"."#);
+ assert_eq!(glob_to_re(br#"*"#), br#"[^/]*"#);
+ assert_eq!(glob_to_re(br#"**"#), br#".*"#);
+ assert_eq!(glob_to_re(br#"**/a"#), br#"(?:.*/)?a"#);
+ assert_eq!(glob_to_re(br#"a/**/b"#), br#"a/(?:.*/)?b"#);
+ assert_eq!(glob_to_re(br#"[a*?!^][^b][!c]"#), br#"[a*?!^][\^b][^c]"#);
+ assert_eq!(glob_to_re(br#"{a,b}"#), br#"(?:a|b)"#);
+ assert_eq!(glob_to_re(br#".\*\?"#), br#"\.\*\?"#);
+ }
+
+ #[test]
+ fn test_parse_pattern_file_contents() {
+ let lines = b"syntax: glob\n*.elc";
+
+ assert_eq!(
+ vec![(b"relglob:*.elc".to_vec(), 2, b"*.elc".to_vec())],
+ parse_pattern_file_contents(lines, b"file_path", false).0,
+ );
+
+ let lines = b"syntax: include\nsyntax: glob";
+
+ assert_eq!(
+ parse_pattern_file_contents(lines, b"file_path", false).0,
+ vec![]
+ );
+ let lines = b"glob:**.o";
+ assert_eq!(
+ parse_pattern_file_contents(lines, b"file_path", false).0,
+ vec![(b"relglob:**.o".to_vec(), 1, b"**.o".to_vec())]
+ );
+ }
+
+ #[test]
+ fn test_build_single_regex_shortcut() {
+ assert_eq!(
+ br"(?:/|$)".to_vec(),
+ build_single_regex(b"rootglob", b"", b"").unwrap()
+ );
+ assert_eq!(
+ br"whatever(?:/|$)".to_vec(),
+ build_single_regex(b"rootglob", b"whatever", b"").unwrap()
+ );
+ assert_eq!(
+ br"[^/]*\.o".to_vec(),
+ build_single_regex(b"rootglob", b"*.o", b"").unwrap()
+ );
+ }
+}
--- a/rust/hg-core/src/lib.rs Tue Jul 09 10:07:35 2019 -0400
+++ b/rust/hg-core/src/lib.rs Mon Jul 22 14:00:33 2019 -0400
@@ -5,7 +5,21 @@
mod ancestors;
pub mod dagops;
pub use ancestors::{AncestorsIterator, LazyAncestors, MissingAncestors};
-pub mod testing; // unconditionally built, for use from integration tests
+mod dirstate;
+pub mod discovery;
+pub mod testing; // unconditionally built, for use from integration tests
+pub use dirstate::{
+ dirs_multiset::DirsMultiset,
+ parsers::{pack_dirstate, parse_dirstate},
+ CopyVec, CopyVecEntry, DirsIterable, DirstateEntry, DirstateParents,
+ DirstateVec,
+};
+mod filepatterns;
+pub mod utils;
+
+pub use filepatterns::{
+ build_single_regex, read_pattern_file, PatternSyntax, PatternTuple,
+};
/// Mercurial revision numbers
///
@@ -13,7 +27,6 @@
/// 4 bytes, and are liberally converted to ints, whence the i32
pub type Revision = i32;
-
/// Marker expressing the absence of a parent
///
/// Independently of the actual representation, `NULL_REVISION` is guaranteed
@@ -31,11 +44,62 @@
/// Return the two parents of the given `Revision`.
///
/// Each of the parents can be independently `NULL_REVISION`
- fn parents(&self, Revision) -> Result<[Revision; 2], GraphError>;
+ fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError>;
}
+pub type LineNumber = usize;
+
#[derive(Clone, Debug, PartialEq)]
pub enum GraphError {
ParentOutOfRange(Revision),
WorkingDirectoryUnsupported,
}
+
+#[derive(Clone, Debug, PartialEq)]
+pub enum DirstateParseError {
+ TooLittleData,
+ Overflow,
+ CorruptedEntry(String),
+}
+
+#[derive(Debug, PartialEq)]
+pub enum DirstatePackError {
+ CorruptedEntry(String),
+ CorruptedParent,
+ BadSize(usize, usize),
+}
+
+#[derive(Debug, PartialEq)]
+pub enum DirstateMapError {
+ PathNotFound(Vec<u8>),
+ EmptyPath,
+}
+
+impl From<std::io::Error> for DirstatePackError {
+ fn from(e: std::io::Error) -> Self {
+ DirstatePackError::CorruptedEntry(e.to_string())
+ }
+}
+
+impl From<std::io::Error> for DirstateParseError {
+ fn from(e: std::io::Error) -> Self {
+ DirstateParseError::CorruptedEntry(e.to_string())
+ }
+}
+
+#[derive(Debug)]
+pub enum PatternError {
+ UnsupportedSyntax(String),
+}
+
+#[derive(Debug)]
+pub enum PatternFileError {
+ IO(std::io::Error),
+ Pattern(PatternError, LineNumber),
+}
+
+impl From<std::io::Error> for PatternFileError {
+ fn from(e: std::io::Error) -> Self {
+ PatternFileError::IO(e)
+ }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/utils.rs Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,75 @@
+pub mod files;
+
+/// Replaces the `from` slice with the `to` slice inside the `buf` slice.
+///
+/// # Examples
+///
+/// ```
+/// use crate::hg::utils::replace_slice;
+/// let mut line = b"I hate writing tests!".to_vec();
+/// replace_slice(&mut line, b"hate", b"love");
+/// assert_eq!(
+/// line,
+/// b"I love writing tests!".to_vec()
+///);
+///
+/// ```
+pub fn replace_slice<T>(buf: &mut [T], from: &[T], to: &[T])
+where
+ T: Clone + PartialEq,
+{
+ if buf.len() < from.len() || from.len() != to.len() {
+ return;
+ }
+ for i in 0..=buf.len() - from.len() {
+ if buf[i..].starts_with(from) {
+ buf[i..(i + from.len())].clone_from_slice(to);
+ }
+ }
+}
+
+pub trait SliceExt {
+ fn trim_end(&self) -> &Self;
+ fn trim_start(&self) -> &Self;
+ fn trim(&self) -> &Self;
+}
+
+fn is_not_whitespace(c: &u8) -> bool {
+ !(*c as char).is_whitespace()
+}
+
+impl SliceExt for [u8] {
+ fn trim_end(&self) -> &[u8] {
+ if let Some(last) = self.iter().rposition(is_not_whitespace) {
+ &self[..last + 1]
+ } else {
+ &[]
+ }
+ }
+ fn trim_start(&self) -> &[u8] {
+ if let Some(first) = self.iter().position(is_not_whitespace) {
+ &self[first..]
+ } else {
+ &[]
+ }
+ }
+
+ /// ```
+ /// use hg::utils::SliceExt;
+ /// assert_eq!(
+ /// b" to trim ".trim(),
+ /// b"to trim"
+ /// );
+ /// assert_eq!(
+ /// b"to trim ".trim(),
+ /// b"to trim"
+ /// );
+ /// assert_eq!(
+ /// b" to trim".trim(),
+ /// b"to trim"
+ /// );
+ /// ```
+ fn trim(&self) -> &[u8] {
+ self.trim_start().trim_end()
+ }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/utils/files.rs Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,83 @@
+use std::iter::FusedIterator;
+use std::path::Path;
+
+pub fn get_path_from_bytes(bytes: &[u8]) -> &Path {
+ let os_str;
+ #[cfg(unix)]
+ {
+ use std::os::unix::ffi::OsStrExt;
+ os_str = std::ffi::OsStr::from_bytes(bytes);
+ }
+ #[cfg(windows)]
+ {
+ // TODO: convert from Windows MBCS (ANSI encoding) to WTF8.
+ // Perhaps, the return type would have to be Result<PathBuf>.
+ use std::os::windows::ffi::OsStrExt;
+ os_str = std::ffi::OsString::from_wide(bytes);
+ }
+
+ Path::new(os_str)
+}
+
+/// An iterator over repository path yielding itself and its ancestors.
+#[derive(Copy, Clone, Debug)]
+pub struct Ancestors<'a> {
+ next: Option<&'a [u8]>,
+}
+
+impl<'a> Iterator for Ancestors<'a> {
+ // if we had an HgPath type, this would yield &'a HgPath
+ type Item = &'a [u8];
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let next = self.next;
+ self.next = match self.next {
+ Some(s) if s.is_empty() => None,
+ Some(s) => {
+ let p = s.iter().rposition(|&c| c == b'/').unwrap_or(0);
+ Some(&s[..p])
+ }
+ None => None,
+ };
+ next
+ }
+}
+
+impl<'a> FusedIterator for Ancestors<'a> {}
+
+/// Returns an iterator yielding ancestor directories of the given repository
+/// path.
+///
+/// The path is separated by '/', and must not start with '/'.
+///
+/// The path itself isn't included unless it is b"" (meaning the root
+/// directory.)
+pub fn find_dirs<'a>(path: &'a [u8]) -> Ancestors<'a> {
+ let mut dirs = Ancestors { next: Some(path) };
+ if !path.is_empty() {
+ dirs.next(); // skip itself
+ }
+ dirs
+}
+
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn find_dirs_some() {
+ let mut dirs = super::find_dirs(b"foo/bar/baz");
+ assert_eq!(dirs.next(), Some(b"foo/bar".as_ref()));
+ assert_eq!(dirs.next(), Some(b"foo".as_ref()));
+ assert_eq!(dirs.next(), Some(b"".as_ref()));
+ assert_eq!(dirs.next(), None);
+ assert_eq!(dirs.next(), None);
+ }
+
+ #[test]
+ fn find_dirs_empty() {
+ // looks weird, but mercurial.util.finddirs(b"") yields b""
+ let mut dirs = super::find_dirs(b"");
+ assert_eq!(dirs.next(), Some(b"".as_ref()));
+ assert_eq!(dirs.next(), None);
+ assert_eq!(dirs.next(), None);
+ }
+}
--- a/rust/hg-core/tests/test_missing_ancestors.rs Tue Jul 09 10:07:35 2019 -0400
+++ b/rust/hg-core/tests/test_missing_ancestors.rs Mon Jul 22 14:00:33 2019 -0400
@@ -1,7 +1,3 @@
-extern crate hg;
-extern crate rand;
-extern crate rand_pcg;
-
use hg::testing::VecGraph;
use hg::Revision;
use hg::*;
--- a/rust/hg-cpython/Cargo.toml Tue Jul 09 10:07:35 2019 -0400
+++ b/rust/hg-cpython/Cargo.toml Mon Jul 22 14:00:33 2019 -0400
@@ -2,6 +2,7 @@
name = "hg-cpython"
version = "0.1.0"
authors = ["Georges Racinet <gracinet@anybox.fr>"]
+edition = "2018"
[lib]
name='rusthg'
--- a/rust/hg-cpython/src/ancestors.rs Tue Jul 09 10:07:35 2019 -0400
+++ b/rust/hg-cpython/src/ancestors.rs Mon Jul 22 14:00:33 2019 -0400
@@ -34,13 +34,15 @@
//! [`LazyAncestors`]: struct.LazyAncestors.html
//! [`MissingAncestors`]: struct.MissingAncestors.html
//! [`AncestorsIterator`]: struct.AncestorsIterator.html
-use crate::conversion::{py_set, rev_pyiter_collect};
-use cindex::Index;
+use crate::{
+ cindex::Index,
+ conversion::{py_set, rev_pyiter_collect},
+ exceptions::GraphError,
+};
use cpython::{
ObjectProtocol, PyClone, PyDict, PyList, PyModule, PyObject, PyResult,
Python, PythonObject, ToPyObject,
};
-use exceptions::GraphError;
use hg::Revision;
use hg::{
AncestorsIterator as CoreIterator, LazyAncestors as CoreLazy,
--- a/rust/hg-cpython/src/cindex.rs Tue Jul 09 10:07:35 2019 -0400
+++ b/rust/hg-cpython/src/cindex.rs Mon Jul 22 14:00:33 2019 -0400
@@ -10,14 +10,14 @@
//! Ideally, we should use an Index entirely implemented in Rust,
//! but this will take some time to get there.
#[cfg(feature = "python27")]
-extern crate python27_sys as python_sys;
+use python27_sys as python_sys;
#[cfg(feature = "python3")]
-extern crate python3_sys as python_sys;
+use python3_sys as python_sys;
-use self::python_sys::PyCapsule_Import;
use cpython::{PyClone, PyErr, PyObject, PyResult, Python};
use hg::{Graph, GraphError, Revision, WORKING_DIRECTORY_REVISION};
use libc::c_int;
+use python_sys::PyCapsule_Import;
use std::ffi::CStr;
use std::mem::transmute;
--- a/rust/hg-cpython/src/dagops.rs Tue Jul 09 10:07:35 2019 -0400
+++ b/rust/hg-cpython/src/dagops.rs Mon Jul 22 14:00:33 2019 -0400
@@ -9,10 +9,12 @@
//! `hg-core` package.
//!
//! From Python, this will be seen as `mercurial.rustext.dagop`
-use cindex::Index;
+use crate::{
+ cindex::Index,
+ conversion::{py_set, rev_pyiter_collect},
+ exceptions::GraphError,
+};
use cpython::{PyDict, PyModule, PyObject, PyResult, Python};
-use crate::conversion::{py_set, rev_pyiter_collect};
-use exceptions::GraphError;
use hg::dagops;
use hg::Revision;
use std::collections::HashSet;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-cpython/src/dirstate.rs Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,331 @@
+// dirstate.rs
+//
+// Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+//! Bindings for the `hg::dirstate` module provided by the
+//! `hg-core` package.
+//!
+//! From Python, this will be seen as `mercurial.rustext.dirstate`
+
+use cpython::{
+ exc, ObjectProtocol, PyBytes, PyDict, PyErr, PyInt, PyModule, PyObject,
+ PyResult, PySequence, PyTuple, Python, PythonObject, ToPyObject,
+};
+use hg::{
+ pack_dirstate, parse_dirstate, CopyVecEntry, DirsIterable, DirsMultiset,
+ DirstateEntry, DirstateMapError, DirstatePackError, DirstateParents,
+ DirstateParseError, DirstateVec,
+};
+use libc::{c_char, c_int};
+#[cfg(feature = "python27")]
+use python27_sys::PyCapsule_Import;
+#[cfg(feature = "python3")]
+use python3_sys::PyCapsule_Import;
+use std::cell::RefCell;
+use std::collections::HashMap;
+use std::ffi::CStr;
+use std::mem::transmute;
+
+/// C code uses a custom `dirstate_tuple` type, checks in multiple instances
+/// for this type, and raises a Python `Exception` if the check does not pass.
+/// Because this type differs only in name from the regular Python tuple, it
+/// would be a good idea in the near future to remove it entirely to allow
+/// for a pure Python tuple of the same effective structure to be used,
+/// rendering this type and the capsule below useless.
+type MakeDirstateTupleFn = extern "C" fn(
+ state: c_char,
+ mode: c_int,
+ size: c_int,
+ mtime: c_int,
+) -> PyObject;
+
+/// This is largely a copy/paste from cindex.rs, pending the merge of a
+/// `py_capsule_fn!` macro in the rust-cpython project:
+/// https://github.com/dgrunwald/rust-cpython/pull/169
+fn decapsule_make_dirstate_tuple(py: Python) -> PyResult<MakeDirstateTupleFn> {
+ unsafe {
+ let caps_name = CStr::from_bytes_with_nul_unchecked(
+ b"mercurial.cext.parsers.make_dirstate_tuple_CAPI\0",
+ );
+ let from_caps = PyCapsule_Import(caps_name.as_ptr(), 0);
+ if from_caps.is_null() {
+ return Err(PyErr::fetch(py));
+ }
+ Ok(transmute(from_caps))
+ }
+}
+
+fn parse_dirstate_wrapper(
+ py: Python,
+ dmap: PyDict,
+ copymap: PyDict,
+ st: PyBytes,
+) -> PyResult<PyTuple> {
+ match parse_dirstate(st.data(py)) {
+ Ok((parents, dirstate_vec, copies)) => {
+ for (filename, entry) in dirstate_vec {
+ dmap.set_item(
+ py,
+ PyBytes::new(py, &filename[..]),
+ decapsule_make_dirstate_tuple(py)?(
+ entry.state as c_char,
+ entry.mode,
+ entry.size,
+ entry.mtime,
+ ),
+ )?;
+ }
+ for CopyVecEntry { path, copy_path } in copies {
+ copymap.set_item(
+ py,
+ PyBytes::new(py, path),
+ PyBytes::new(py, copy_path),
+ )?;
+ }
+ Ok((PyBytes::new(py, parents.p1), PyBytes::new(py, parents.p2))
+ .to_py_object(py))
+ }
+ Err(e) => Err(PyErr::new::<exc::ValueError, _>(
+ py,
+ match e {
+ DirstateParseError::TooLittleData => {
+ "too little data for parents".to_string()
+ }
+ DirstateParseError::Overflow => {
+ "overflow in dirstate".to_string()
+ }
+ DirstateParseError::CorruptedEntry(e) => e,
+ },
+ )),
+ }
+}
+
+fn extract_dirstate_vec(
+ py: Python,
+ dmap: &PyDict,
+) -> Result<DirstateVec, PyErr> {
+ dmap.items(py)
+ .iter()
+ .map(|(filename, stats)| {
+ let stats = stats.extract::<PySequence>(py)?;
+ let state = stats.get_item(py, 0)?.extract::<PyBytes>(py)?;
+ let state = state.data(py)[0] as i8;
+ let mode = stats.get_item(py, 1)?.extract(py)?;
+ let size = stats.get_item(py, 2)?.extract(py)?;
+ let mtime = stats.get_item(py, 3)?.extract(py)?;
+ let filename = filename.extract::<PyBytes>(py)?;
+ let filename = filename.data(py);
+ Ok((
+ filename.to_owned(),
+ DirstateEntry {
+ state,
+ mode,
+ size,
+ mtime,
+ },
+ ))
+ })
+ .collect()
+}
+
+fn pack_dirstate_wrapper(
+ py: Python,
+ dmap: PyDict,
+ copymap: PyDict,
+ pl: PyTuple,
+ now: PyInt,
+) -> PyResult<PyBytes> {
+ let p1 = pl.get_item(py, 0).extract::<PyBytes>(py)?;
+ let p1: &[u8] = p1.data(py);
+ let p2 = pl.get_item(py, 1).extract::<PyBytes>(py)?;
+ let p2: &[u8] = p2.data(py);
+
+ let dirstate_vec = extract_dirstate_vec(py, &dmap)?;
+
+ let copies: Result<HashMap<Vec<u8>, Vec<u8>>, PyErr> = copymap
+ .items(py)
+ .iter()
+ .map(|(key, value)| {
+ Ok((
+ key.extract::<PyBytes>(py)?.data(py).to_owned(),
+ value.extract::<PyBytes>(py)?.data(py).to_owned(),
+ ))
+ })
+ .collect();
+
+ match pack_dirstate(
+ &dirstate_vec,
+ &copies?,
+ DirstateParents { p1, p2 },
+ now.as_object().extract::<i32>(py)?,
+ ) {
+ Ok((packed, new_dirstate_vec)) => {
+ for (
+ filename,
+ DirstateEntry {
+ state,
+ mode,
+ size,
+ mtime,
+ },
+ ) in new_dirstate_vec
+ {
+ dmap.set_item(
+ py,
+ PyBytes::new(py, &filename[..]),
+ decapsule_make_dirstate_tuple(py)?(
+ state as c_char,
+ mode,
+ size,
+ mtime,
+ ),
+ )?;
+ }
+ Ok(PyBytes::new(py, &packed))
+ }
+ Err(error) => Err(PyErr::new::<exc::ValueError, _>(
+ py,
+ match error {
+ DirstatePackError::CorruptedParent => {
+ "expected a 20-byte hash".to_string()
+ }
+ DirstatePackError::CorruptedEntry(e) => e,
+ DirstatePackError::BadSize(expected, actual) => {
+ format!("bad dirstate size: {} != {}", actual, expected)
+ }
+ },
+ )),
+ }
+}
+
+py_class!(pub class Dirs |py| {
+ data dirs_map: RefCell<DirsMultiset>;
+
+ // `map` is either a `dict` or a flat iterator (usually a `set`, sometimes
+ // a `list`)
+ def __new__(
+ _cls,
+ map: PyObject,
+ skip: Option<PyObject> = None
+ ) -> PyResult<Self> {
+ let mut skip_state: Option<i8> = None;
+ if let Some(skip) = skip {
+ skip_state = Some(skip.extract::<PyBytes>(py)?.data(py)[0] as i8);
+ }
+ let dirs_map;
+
+ if let Ok(map) = map.cast_as::<PyDict>(py) {
+ let dirstate_vec = extract_dirstate_vec(py, &map)?;
+ dirs_map = DirsMultiset::new(
+ DirsIterable::Dirstate(dirstate_vec),
+ skip_state,
+ )
+ } else {
+ let map: Result<Vec<Vec<u8>>, PyErr> = map
+ .iter(py)?
+ .map(|o| Ok(o?.extract::<PyBytes>(py)?.data(py).to_owned()))
+ .collect();
+ dirs_map = DirsMultiset::new(
+ DirsIterable::Manifest(map?),
+ skip_state,
+ )
+ }
+
+ Self::create_instance(py, RefCell::new(dirs_map))
+ }
+
+ def addpath(&self, path: PyObject) -> PyResult<PyObject> {
+ self.dirs_map(py).borrow_mut().add_path(
+ path.extract::<PyBytes>(py)?.data(py),
+ );
+ Ok(py.None())
+ }
+
+ def delpath(&self, path: PyObject) -> PyResult<PyObject> {
+ self.dirs_map(py).borrow_mut().delete_path(
+ path.extract::<PyBytes>(py)?.data(py),
+ )
+ .and(Ok(py.None()))
+ .or_else(|e| {
+ match e {
+ DirstateMapError::PathNotFound(_p) => {
+ Err(PyErr::new::<exc::ValueError, _>(
+ py,
+ "expected a value, found none".to_string(),
+ ))
+ }
+ DirstateMapError::EmptyPath => {
+ Ok(py.None())
+ }
+ }
+ })
+ }
+
+ // This is really inefficient on top of being ugly, but it's an easy way
+ // of having it work to continue working on the rest of the module
+ // hopefully bypassing Python entirely pretty soon.
+ def __iter__(&self) -> PyResult<PyObject> {
+ let dict = PyDict::new(py);
+
+ for (key, value) in self.dirs_map(py).borrow().iter() {
+ dict.set_item(
+ py,
+ PyBytes::new(py, &key[..]),
+ value.to_py_object(py),
+ )?;
+ }
+
+ let locals = PyDict::new(py);
+ locals.set_item(py, "obj", dict)?;
+
+ py.eval("iter(obj)", None, Some(&locals))
+ }
+
+ def __contains__(&self, item: PyObject) -> PyResult<bool> {
+ Ok(self
+ .dirs_map(py)
+ .borrow()
+ .contains_key(item.extract::<PyBytes>(py)?.data(py).as_ref()))
+ }
+});
+
+/// Create the module, with `__package__` given from parent
+pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
+ let dotted_name = &format!("{}.dirstate", package);
+ let m = PyModule::new(py, dotted_name)?;
+
+ m.add(py, "__package__", package)?;
+ m.add(py, "__doc__", "Dirstate - Rust implementation")?;
+ m.add(
+ py,
+ "parse_dirstate",
+ py_fn!(
+ py,
+ parse_dirstate_wrapper(dmap: PyDict, copymap: PyDict, st: PyBytes)
+ ),
+ )?;
+ m.add(
+ py,
+ "pack_dirstate",
+ py_fn!(
+ py,
+ pack_dirstate_wrapper(
+ dmap: PyDict,
+ copymap: PyDict,
+ pl: PyTuple,
+ now: PyInt
+ )
+ ),
+ )?;
+
+ m.add_class::<Dirs>(py)?;
+
+ let sys = PyModule::import(py, "sys")?;
+ let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
+ sys_modules.set_item(py, dotted_name, &m)?;
+
+ Ok(m)
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-cpython/src/discovery.rs Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,129 @@
+// discovery.rs
+//
+// Copyright 2018 Georges Racinet <gracinet@anybox.fr>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+//! Bindings for the `hg::discovery` module provided by the
+//! `hg-core` crate. From Python, this will be seen as `rustext.discovery`
+//!
+//! # Classes visible from Python:
+//! - [`PartialDiscover`] is the Rust implementation of
+//! `mercurial.setdiscovery.partialdiscovery`.
+
+use crate::{
+ cindex::Index,
+ conversion::{py_set, rev_pyiter_collect},
+ exceptions::GraphError,
+};
+use cpython::{
+ ObjectProtocol, PyDict, PyModule, PyObject, PyResult, Python,
+ PythonObject, ToPyObject,
+};
+use hg::discovery::PartialDiscovery as CorePartialDiscovery;
+use hg::Revision;
+
+use std::cell::RefCell;
+
+py_class!(pub class PartialDiscovery |py| {
+ data inner: RefCell<Box<CorePartialDiscovery<Index>>>;
+
+ def __new__(
+ _cls,
+ index: PyObject,
+ targetheads: PyObject
+ ) -> PyResult<PartialDiscovery> {
+ Self::create_instance(
+ py,
+ RefCell::new(Box::new(CorePartialDiscovery::new(
+ Index::new(py, index)?,
+ rev_pyiter_collect(py, &targetheads)?,
+ )))
+ )
+ }
+
+ def addcommons(&self, commons: PyObject) -> PyResult<PyObject> {
+ let mut inner = self.inner(py).borrow_mut();
+ let commons_vec: Vec<Revision> = rev_pyiter_collect(py, &commons)?;
+ inner.add_common_revisions(commons_vec)
+ .map_err(|e| GraphError::pynew(py, e))?;
+ Ok(py.None())
+ }
+
+ def addmissings(&self, missings: PyObject) -> PyResult<PyObject> {
+ let mut inner = self.inner(py).borrow_mut();
+ let missings_vec: Vec<Revision> = rev_pyiter_collect(py, &missings)?;
+ inner.add_missing_revisions(missings_vec)
+ .map_err(|e| GraphError::pynew(py, e))?;
+ Ok(py.None())
+ }
+
+ def addinfo(&self, sample: PyObject) -> PyResult<PyObject> {
+ let mut missing: Vec<Revision> = Vec::new();
+ let mut common: Vec<Revision> = Vec::new();
+ for info in sample.iter(py)? { // info is a pair (Revision, bool)
+ let mut revknown = info?.iter(py)?;
+ let rev: Revision = revknown.next().unwrap()?.extract(py)?;
+ let known: bool = revknown.next().unwrap()?.extract(py)?;
+ if known {
+ common.push(rev);
+ } else {
+ missing.push(rev);
+ }
+ }
+ let mut inner = self.inner(py).borrow_mut();
+ inner.add_common_revisions(common)
+ .map_err(|e| GraphError::pynew(py, e))?;
+ inner.add_missing_revisions(missing)
+ .map_err(|e| GraphError::pynew(py, e))?;
+ Ok(py.None())
+ }
+
+ def hasinfo(&self) -> PyResult<bool> {
+ Ok(self.inner(py).borrow().has_info())
+ }
+
+ def iscomplete(&self) -> PyResult<bool> {
+ Ok(self.inner(py).borrow().is_complete())
+ }
+
+ def stats(&self) -> PyResult<PyDict> {
+ let stats = self.inner(py).borrow().stats();
+ let as_dict: PyDict = PyDict::new(py);
+ as_dict.set_item(py, "undecided",
+ stats.undecided.map(
+ |l| l.to_py_object(py).into_object())
+ .unwrap_or_else(|| py.None()))?;
+ Ok(as_dict)
+ }
+
+ def commonheads(&self) -> PyResult<PyObject> {
+ py_set(
+ py,
+ &self.inner(py).borrow().common_heads()
+ .map_err(|e| GraphError::pynew(py, e))?
+ )
+ }
+});
+
+/// Create the module, with __package__ given from parent
+pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
+ let dotted_name = &format!("{}.discovery", package);
+ let m = PyModule::new(py, dotted_name)?;
+ m.add(py, "__package__", package)?;
+ m.add(
+ py,
+ "__doc__",
+ "Discovery of common node sets - Rust implementation",
+ )?;
+ m.add_class::<PartialDiscovery>(py)?;
+
+ let sys = PyModule::import(py, "sys")?;
+ let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
+ sys_modules.set_item(py, dotted_name, &m)?;
+ // Example C code (see pyexpat.c and import.c) will "give away the
+ // reference", but we won't because it will be consumed once the
+ // Rust PyObject is dropped.
+ Ok(m)
+}
--- a/rust/hg-cpython/src/exceptions.rs Tue Jul 09 10:07:35 2019 -0400
+++ b/rust/hg-cpython/src/exceptions.rs Mon Jul 22 14:00:33 2019 -0400
@@ -12,8 +12,10 @@
//! existing Python exceptions if appropriate.
//!
//! [`GraphError`]: struct.GraphError.html
-use cpython::exc::ValueError;
-use cpython::{PyErr, Python};
+use cpython::{
+ exc::{IOError, RuntimeError, ValueError},
+ py_exception, PyErr, Python,
+};
use hg;
py_exception!(rustext, GraphError, ValueError);
@@ -36,3 +38,32 @@
}
}
}
+
+py_exception!(rustext, PatternError, RuntimeError);
+py_exception!(rustext, PatternFileError, RuntimeError);
+
+impl PatternError {
+ pub fn pynew(py: Python, inner: hg::PatternError) -> PyErr {
+ match inner {
+ hg::PatternError::UnsupportedSyntax(m) => {
+ PatternError::new(py, ("PatternError", m))
+ }
+ }
+ }
+}
+
+impl PatternFileError {
+ pub fn pynew(py: Python, inner: hg::PatternFileError) -> PyErr {
+ match inner {
+ hg::PatternFileError::IO(e) => {
+ let value = (e.raw_os_error().unwrap_or(2), e.to_string());
+ PyErr::new::<IOError, _>(py, value)
+ }
+ hg::PatternFileError::Pattern(e, l) => match e {
+ hg::PatternError::UnsupportedSyntax(m) => {
+ PatternFileError::new(py, ("PatternFileError", m, l))
+ }
+ },
+ }
+ }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-cpython/src/filepatterns.rs Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,124 @@
+// filepatterns.rs
+//
+// Copyright 2019, Georges Racinet <gracinet@anybox.fr>,
+// Raphaël Gomès <rgomes@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+//! Bindings for the `hg::filepatterns` module provided by the
+//! `hg-core` crate. From Python, this will be seen as `rustext.filepatterns`
+//! and can be used as replacement for the the pure `filepatterns` Python module.
+//!
+use crate::exceptions::{PatternError, PatternFileError};
+use cpython::{
+ PyBytes, PyDict, PyModule, PyObject, PyResult, PyTuple, Python, ToPyObject,
+};
+use hg::{build_single_regex, read_pattern_file, LineNumber, PatternTuple};
+
+/// Rust does not like functions with different return signatures.
+/// The 3-tuple version is always returned by the hg-core function,
+/// the (potential) conversion is handled at this level since it is not likely
+/// to have any measurable impact on performance.
+///
+/// The Python implementation passes a function reference for `warn` instead
+/// of a boolean that is used to emit warnings while parsing. The Rust
+/// implementation chooses to accumulate the warnings and propagate them to
+/// Python upon completion. See the `readpatternfile` function in `match.py`
+/// for more details.
+fn read_pattern_file_wrapper(
+ py: Python,
+ file_path: PyObject,
+ warn: bool,
+ source_info: bool,
+) -> PyResult<PyTuple> {
+ match read_pattern_file(file_path.extract::<PyBytes>(py)?.data(py), warn) {
+ Ok((patterns, warnings)) => {
+ if source_info {
+ let itemgetter = |x: &PatternTuple| {
+ (PyBytes::new(py, &x.0), x.1, PyBytes::new(py, &x.2))
+ };
+ let results: Vec<(PyBytes, LineNumber, PyBytes)> =
+ patterns.iter().map(itemgetter).collect();
+ return Ok((results, warnings_to_py_bytes(py, &warnings))
+ .to_py_object(py));
+ }
+ let itemgetter = |x: &PatternTuple| PyBytes::new(py, &x.0);
+ let results: Vec<PyBytes> =
+ patterns.iter().map(itemgetter).collect();
+ Ok(
+ (results, warnings_to_py_bytes(py, &warnings))
+ .to_py_object(py),
+ )
+ }
+ Err(e) => Err(PatternFileError::pynew(py, e)),
+ }
+}
+
+fn warnings_to_py_bytes(
+ py: Python,
+ warnings: &[(Vec<u8>, Vec<u8>)],
+) -> Vec<(PyBytes, PyBytes)> {
+ warnings
+ .iter()
+ .map(|(path, syn)| (PyBytes::new(py, path), PyBytes::new(py, syn)))
+ .collect()
+}
+
+fn build_single_regex_wrapper(
+ py: Python,
+ kind: PyObject,
+ pat: PyObject,
+ globsuffix: PyObject,
+) -> PyResult<PyBytes> {
+ match build_single_regex(
+ kind.extract::<PyBytes>(py)?.data(py),
+ pat.extract::<PyBytes>(py)?.data(py),
+ globsuffix.extract::<PyBytes>(py)?.data(py),
+ ) {
+ Ok(regex) => Ok(PyBytes::new(py, ®ex)),
+ Err(e) => Err(PatternError::pynew(py, e)),
+ }
+}
+
+pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
+ let dotted_name = &format!("{}.filepatterns", package);
+ let m = PyModule::new(py, dotted_name)?;
+
+ m.add(py, "__package__", package)?;
+ m.add(
+ py,
+ "__doc__",
+ "Patterns files parsing - Rust implementation",
+ )?;
+ m.add(
+ py,
+ "build_single_regex",
+ py_fn!(
+ py,
+ build_single_regex_wrapper(
+ kind: PyObject,
+ pat: PyObject,
+ globsuffix: PyObject
+ )
+ ),
+ )?;
+ m.add(
+ py,
+ "read_pattern_file",
+ py_fn!(
+ py,
+ read_pattern_file_wrapper(
+ file_path: PyObject,
+ warn: bool,
+ source_info: bool
+ )
+ ),
+ )?;
+ m.add(py, "PatternError", py.get_type::<PatternError>())?;
+ let sys = PyModule::import(py, "sys")?;
+ let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
+ sys_modules.set_item(py, dotted_name, &m)?;
+
+ Ok(m)
+}
--- a/rust/hg-cpython/src/lib.rs Tue Jul 09 10:07:35 2019 -0400
+++ b/rust/hg-cpython/src/lib.rs Mon Jul 22 14:00:33 2019 -0400
@@ -19,16 +19,19 @@
//! 'Generic DAG ancestor algorithms - Rust implementation'
//! ```
+/// This crate uses nested private macros, `extern crate` is still needed in
+/// 2018 edition.
#[macro_use]
extern crate cpython;
-extern crate hg;
-extern crate libc;
pub mod ancestors;
mod cindex;
mod conversion;
pub mod dagops;
+pub mod dirstate;
+pub mod discovery;
pub mod exceptions;
+pub mod filepatterns;
py_module_initializer!(rustext, initrustext, PyInit_rustext, |py, m| {
m.add(
@@ -40,6 +43,23 @@
let dotted_name: String = m.get(py, "__name__")?.extract(py)?;
m.add(py, "ancestor", ancestors::init_module(py, &dotted_name)?)?;
m.add(py, "dagop", dagops::init_module(py, &dotted_name)?)?;
+ m.add(py, "discovery", discovery::init_module(py, &dotted_name)?)?;
+ m.add(py, "dirstate", dirstate::init_module(py, &dotted_name)?)?;
+ m.add(
+ py,
+ "filepatterns",
+ filepatterns::init_module(py, &dotted_name)?,
+ )?;
m.add(py, "GraphError", py.get_type::<exceptions::GraphError>())?;
+ m.add(
+ py,
+ "PatternFileError",
+ py.get_type::<exceptions::PatternFileError>(),
+ )?;
+ m.add(
+ py,
+ "PatternError",
+ py.get_type::<exceptions::PatternError>(),
+ )?;
Ok(())
});
--- a/setup.py Tue Jul 09 10:07:35 2019 -0400
+++ b/setup.py Mon Jul 22 14:00:33 2019 -0400
@@ -32,6 +32,7 @@
])
import sys, platform
+import sysconfig
if sys.version_info[0] >= 3:
printf = eval('print')
libdir_escape = 'unicode_escape'
@@ -104,6 +105,12 @@
printf(error, file=sys.stderr)
sys.exit(1)
+if sys.version_info[0] >= 3:
+ DYLIB_SUFFIX = sysconfig.get_config_vars()['EXT_SUFFIX']
+else:
+ # deprecated in Python 3
+ DYLIB_SUFFIX = sysconfig.get_config_vars()['SO']
+
# Solaris Python packaging brain damage
try:
import hashlib
@@ -446,10 +453,12 @@
class hgdist(Distribution):
pure = False
+ rust = hgrustext is not None
cffi = ispypy
global_options = Distribution.global_options + [
('pure', None, "use pure (slow) Python code instead of C extensions"),
+ ('rust', None, "use Rust extensions additionally to C extensions"),
]
def has_ext_modules(self):
@@ -460,18 +469,25 @@
# This is ugly as a one-liner. So use a variable.
buildextnegops = dict(getattr(build_ext, 'negative_options', {}))
buildextnegops['no-zstd'] = 'zstd'
+buildextnegops['no-rust'] = 'rust'
class hgbuildext(build_ext):
user_options = build_ext.user_options + [
('zstd', None, 'compile zstd bindings [default]'),
('no-zstd', None, 'do not compile zstd bindings'),
+ ('rust', None,
+ 'compile Rust extensions if they are in use '
+ '(requires Cargo) [default]'),
+ ('no-rust', None, 'do not compile Rust extensions'),
]
- boolean_options = build_ext.boolean_options + ['zstd']
+ boolean_options = build_ext.boolean_options + ['zstd', 'rust']
negative_opt = buildextnegops
def initialize_options(self):
self.zstd = True
+ self.rust = True
+
return build_ext.initialize_options(self)
def build_extensions(self):
@@ -484,14 +500,19 @@
self.extensions = [e for e in self.extensions
if e.name != 'mercurial.zstd']
- for rustext in ruststandalones:
- rustext.build('' if self.inplace else self.build_lib)
+ # Build Rust standalon extensions if it'll be used
+ # and its build is not explictely disabled (for external build
+ # as Linux distributions would do)
+ if self.distribution.rust and self.rust and hgrustext != 'direct-ffi':
+ for rustext in ruststandalones:
+ rustext.build('' if self.inplace else self.build_lib)
return build_ext.build_extensions(self)
def build_extension(self, ext):
- if isinstance(ext, RustExtension):
- ext.rustbuild()
+ if (self.distribution.rust and self.rust
+ and isinstance(ext, RustExtension)):
+ ext.rustbuild()
try:
build_ext.build_extension(self, ext)
except CCompilerError:
@@ -553,13 +574,14 @@
basepath = os.path.join(self.build_lib, 'mercurial')
self.mkpath(basepath)
+ rust = self.distribution.rust
if self.distribution.pure:
modulepolicy = 'py'
elif self.build_lib == '.':
- # in-place build should run without rebuilding C extensions
- modulepolicy = 'allow'
+ # in-place build should run without rebuilding and Rust extensions
+ modulepolicy = 'rust+c-allow' if rust else 'allow'
else:
- modulepolicy = 'c'
+ modulepolicy = 'rust+c' if rust else 'c'
content = b''.join([
b'# this file is autogenerated by setup.py\n',
@@ -1131,8 +1153,6 @@
def __init__(self, mpath, sources, rustlibname, subcrate,
py3_features=None, **kw):
Extension.__init__(self, mpath, sources, **kw)
- if hgrustext is None:
- return
srcdir = self.rustsrcdir = os.path.join('rust', subcrate)
self.py3_features = py3_features
@@ -1147,9 +1167,20 @@
for fname in fnames
if os.path.splitext(fname)[1] == '.rs')
+ @staticmethod
+ def rustdylibsuffix():
+ """Return the suffix for shared libraries produced by rustc.
+
+ See also: https://doc.rust-lang.org/reference/linkage.html
+ """
+ if sys.platform == 'darwin':
+ return '.dylib'
+ elif os.name == 'nt':
+ return '.dll'
+ else:
+ return '.so'
+
def rustbuild(self):
- if hgrustext is None:
- return
env = os.environ.copy()
if 'HGTEST_RESTOREENV' in env:
# Mercurial tests change HOME to a temporary directory,
@@ -1164,10 +1195,14 @@
import pwd
env['HOME'] = pwd.getpwuid(os.getuid()).pw_dir
- cargocmd = ['cargo', 'build', '-vv', '--release']
+ cargocmd = ['cargo', 'rustc', '-vv', '--release']
if sys.version_info[0] == 3 and self.py3_features is not None:
cargocmd.extend(('--features', self.py3_features,
'--no-default-features'))
+ cargocmd.append('--')
+ if sys.platform == 'darwin':
+ cargocmd.extend(("-C", "link-arg=-undefined",
+ "-C", "link-arg=dynamic_lookup"))
try:
subprocess.check_call(cargocmd, env=env, cwd=self.rustsrcdir)
except OSError as exc:
@@ -1201,6 +1236,10 @@
self.libraries.append(rustlibname)
self.library_dirs.append(self.rusttargetdir)
+ def rustbuild(self):
+ if hgrustext == 'direct-ffi':
+ RustExtension.rustbuild(self)
+
class RustStandaloneExtension(RustExtension):
def __init__(self, pydottedname, rustcrate, dylibname, **kw):
@@ -1212,9 +1251,9 @@
self.rustbuild()
target = [target_dir]
target.extend(self.name.split('.'))
- ext = '.so' # TODO Unix only
- target[-1] += ext
- shutil.copy2(os.path.join(self.rusttargetdir, self.dylibname + ext),
+ target[-1] += DYLIB_SUFFIX
+ shutil.copy2(os.path.join(self.rusttargetdir,
+ self.dylibname + self.rustdylibsuffix()),
os.path.join(*target))
@@ -1255,14 +1294,10 @@
]),
Extension('hgext.fsmonitor.pywatchman.bser',
['hgext/fsmonitor/pywatchman/bser.c']),
+ RustStandaloneExtension('mercurial.rustext', 'hg-cpython', 'librusthg',
+ py3_features='python3'),
]
-if hgrustext == 'cpython':
- extmodules.append(
- RustStandaloneExtension('mercurial.rustext', 'hg-cpython', 'librusthg',
- py3_features='python3')
- )
-
sys.path.insert(0, 'contrib/python-zstandard')
import setup_zstd
--- a/tests/common-pattern.py Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/common-pattern.py Mon Jul 22 14:00:33 2019 -0400
@@ -115,6 +115,11 @@
# Various platform error strings, keyed on a common replacement string
_errors = {
br'$ENOENT$': (
+ # IOError in Python does not have the same error message
+ # than in Rust, and automatic conversion is not possible
+ # because of module member privacy.
+ br'No such file or directory \(os error 2\)',
+
# strerror()
br'No such file or directory',
--- a/tests/drawdag.py Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/drawdag.py Mon Jul 22 14:00:33 2019 -0400
@@ -300,6 +300,12 @@
def commit(self):
return self._repo.commitctx(self)
+ def p1copies(self):
+ return {}
+
+ def p2copies(self):
+ return {}
+
def _walkgraph(edges):
"""yield node, parents in topologically order"""
visible = set(edges.keys())
--- a/tests/fakedirstatewritetime.py Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/fakedirstatewritetime.py Mon Jul 22 14:00:33 2019 -0400
@@ -16,6 +16,12 @@
)
from mercurial.utils import dateutil
+try:
+ from mercurial import rustext
+ rustext.__name__ # force actual import (see hgdemandimport)
+except ImportError:
+ rustext = None
+
configtable = {}
configitem = registrar.configitem(configtable)
@@ -51,16 +57,22 @@
# 'fakenow' value and 'touch -t YYYYmmddHHMM' argument easy
fakenow = dateutil.parsedate(fakenow, [b'%Y%m%d%H%M'])[0]
- orig_pack_dirstate = parsers.pack_dirstate
+ if rustext is not None:
+ orig_module = rustext.dirstate
+ orig_pack_dirstate = rustext.dirstate.pack_dirstate
+ else:
+ orig_module = parsers
+ orig_pack_dirstate = parsers.pack_dirstate
+
orig_dirstate_getfsnow = dirstate._getfsnow
wrapper = lambda *args: pack_dirstate(fakenow, orig_pack_dirstate, *args)
- parsers.pack_dirstate = wrapper
+ orig_module.pack_dirstate = wrapper
dirstate._getfsnow = lambda *args: fakenow
try:
return func()
finally:
- parsers.pack_dirstate = orig_pack_dirstate
+ orig_module.pack_dirstate = orig_pack_dirstate
dirstate._getfsnow = orig_dirstate_getfsnow
def _poststatusfixup(orig, workingctx, status, fixup):
@@ -74,5 +86,5 @@
def extsetup(ui):
extensions.wrapfunction(context.workingctx, '_poststatusfixup',
_poststatusfixup)
- extensions.wrapfunction(context.committablectx, 'markcommitted',
+ extensions.wrapfunction(context.workingctx, 'markcommitted',
markcommitted)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabread-str-time.json Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,221 @@
+{
+ "version": 1,
+ "interactions": [
+ {
+ "request": {
+ "body": "api.token=cli-hahayouwish&ids%5B0%5D=1285",
+ "headers": {
+ "content-length": [
+ "58"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 4.8.2)"
+ ]
+ },
+ "method": "POST",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query"
+ },
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "headers": {
+ "content-length": [
+ "822"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "date": [
+ "Mon, 01 Jul 2019 22:36:40 GMT"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "content-type": [
+ "application/json"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"1285\",\"phid\":\"PHID-DREV-uefuzc6kbhhkoqhr347g\",\"title\":\"test string time\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D1285\",\"dateCreated\":\"1562019861\",\"dateModified\":\"1562019862\",\"authorPHID\":\"PHID-USER-qmzis76vb2yh3ogldu6r\",\"status\":\"0\",\"statusName\":\"Draft\",\"properties\":{\"draft.broadcast\":false,\"lines.added\":1,\"lines.removed\":0,\"buildables\":{\"PHID-HMBB-lhjiovrsqtbft2fz4lua\":{\"status\":\"passed\"}}},\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"1\",\"activeDiffPHID\":\"PHID-DIFF-dkgwbpgcv37pymqieyyv\",\"diffs\":[\"2069\"],\"commits\":[],\"reviewers\":[],\"ccs\":[],\"hashes\":[],\"auxiliary\":{\"bugzilla.bug-id\":null,\"phabricator:projects\":[\"PHID-PROJ-f2a3wl5wxtqdtfgdjqzk\"],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}"
+ }
+ }
+ },
+ {
+ "request": {
+ "body": "api.token=cli-hahayouwish&ids%5B0%5D=2069",
+ "headers": {
+ "content-length": [
+ "58"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 4.8.2)"
+ ]
+ },
+ "method": "POST",
+ "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs"
+ },
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "headers": {
+ "content-length": [
+ "1137"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "date": [
+ "Mon, 01 Jul 2019 22:36:41 GMT"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "content-type": [
+ "application/json"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"2069\":{\"id\":\"2069\",\"revisionID\":\"1285\",\"dateCreated\":\"1562019858\",\"dateModified\":\"1562019861\",\"sourceControlBaseRevision\":null,\"sourceControlPath\":null,\"sourceControlSystem\":null,\"branch\":null,\"bookmark\":null,\"creationMethod\":\"web\",\"description\":null,\"unitStatus\":\"4\",\"lintStatus\":\"4\",\"changes\":[{\"id\":\"5416\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"ei3Zy6KS2Wut\"},\"oldPath\":null,\"currentPath\":\"test\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+test\\n\"}]}],\"properties\":{\"local:commits\":{\"da5c8c6bf23a36b6e3af011bc3734460692c23ce\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"da5c8c6bf23a36b6e3af011bc3734460692c23ce\",\"rev\":\"da5c8c6bf23a36b6e3af011bc3734460692c23ce\",\"parents\":[\"1f634396406d03e565ed645370e5fecd062cf215\"],\"time\":\"1562019844\"}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}"
+ }
+ }
+ },
+ {
+ "request": {
+ "body": "diffID=2069&api.token=cli-hahayouwish",
+ "headers": {
+ "content-length": [
+ "54"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 4.8.2)"
+ ]
+ },
+ "method": "POST",
+ "uri": "https://phab.mercurial-scm.org//api/differential.getrawdiff"
+ },
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "headers": {
+ "content-length": [
+ "153"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "date": [
+ "Mon, 01 Jul 2019 22:36:42 GMT"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "content-type": [
+ "application/json"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":\"diff --git a\\/test b\\/test\\nnew file mode 100644\\n--- \\/dev\\/null\\n+++ b\\/test\\n@@ -0,0 +1 @@\\n+test\\n\\n\",\"error_code\":null,\"error_info\":null}"
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabsend-comment-created.json Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,653 @@
+{
+ "version": 1,
+ "interactions": [
+ {
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "body": {
+ "string": "{\"result\":{\"data\":[{\"id\":12,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":\"Mercurial\",\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"refRules\":{\"fetchRules\":[],\"trackRules\":[],\"permanentRefRules\":[]},\"spacePHID\":null,\"dateCreated\":1523292927,\"dateModified\":1523297359,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "date": [
+ "Fri, 07 Jun 2019 20:23:04 GMT"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "content-length": [
+ "587"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ]
+ }
+ },
+ "request": {
+ "method": "POST",
+ "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+ "body": "constraints%5Bcallsigns%5D%5B0%5D=HG&api.token=cli-hahayouwish",
+ "headers": {
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-length": [
+ "81"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0.1+253-f2ebe61e9a8e+20190607)"
+ ]
+ }
+ }
+ },
+ {
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "body": {
+ "string": "{\"result\":{\"id\":1989,\"phid\":\"PHID-DIFF-3mtjdk4tjjkaw4arccah\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/1989\\/\"},\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "date": [
+ "Fri, 07 Jun 2019 20:23:05 GMT"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "content-length": [
+ "172"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ]
+ }
+ },
+ "request": {
+ "method": "POST",
+ "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
+ "body": "repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3&diff=diff+--git+a%2Fcomment+b%2Fcomment%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Fcomment%0A%40%40+-0%2C0+%2B1%2C1+%40%40%0A%2Bcomment%0A&api.token=cli-hahayouwish",
+ "headers": {
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-length": [
+ "243"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0.1+253-f2ebe61e9a8e+20190607)"
+ ]
+ }
+ }
+ },
+ {
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "date": [
+ "Fri, 07 Jun 2019 20:23:06 GMT"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "content-length": [
+ "51"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ]
+ }
+ },
+ "request": {
+ "method": "POST",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "body": "api.token=cli-hahayouwish&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%22a7ee4bac036ae424bfc9e1a4228c4fa06d637f53%22%2C+%22parent%22%3A+%22a19f1434f9a578325eb9799c9961b5465d4e6e40%22%2C+%22user%22%3A+%22test%22%7D&name=hg%3Ameta&diff_id=1989",
+ "headers": {
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-length": [
+ "296"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0.1+253-f2ebe61e9a8e+20190607)"
+ ]
+ }
+ }
+ },
+ {
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "date": [
+ "Fri, 07 Jun 2019 20:23:07 GMT"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "content-length": [
+ "51"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ]
+ }
+ },
+ "request": {
+ "method": "POST",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "body": "api.token=cli-hahayouwish&data=%7B%22a7ee4bac036ae424bfc9e1a4228c4fa06d637f53%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%22a7ee4bac036ae424bfc9e1a4228c4fa06d637f53%22%2C+%22parents%22%3A+%5B%22a19f1434f9a578325eb9799c9961b5465d4e6e40%22%5D%2C+%22time%22%3A+0%7D%7D&name=local%3Acommits&diff_id=1989",
+ "headers": {
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-length": [
+ "396"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0.1+253-f2ebe61e9a8e+20190607)"
+ ]
+ }
+ }
+ },
+ {
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create comment for phabricator test\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"create comment for phabricator test\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "date": [
+ "Fri, 07 Jun 2019 20:23:07 GMT"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "content-length": [
+ "288"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ]
+ }
+ },
+ "request": {
+ "method": "POST",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "body": "corpus=create+comment+for+phabricator+test&api.token=cli-hahayouwish",
+ "headers": {
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-length": [
+ "85"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0.1+253-f2ebe61e9a8e+20190607)"
+ ]
+ }
+ }
+ },
+ {
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":1253,\"phid\":\"PHID-DREV-4rhqd6v3yxbtodc7wbv7\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-g73sutb5nezcyh6\"},{\"phid\":\"PHID-XACT-DREV-yg6ysul7pcxtqce\"},{\"phid\":\"PHID-XACT-DREV-vxhpgk64u3kax45\"},{\"phid\":\"PHID-XACT-DREV-mkt5rq3racrpzhe\"},{\"phid\":\"PHID-XACT-DREV-s7la723tgqhwovt\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "date": [
+ "Fri, 07 Jun 2019 20:23:08 GMT"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "content-length": [
+ "336"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ]
+ }
+ },
+ "request": {
+ "method": "POST",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "body": "transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-3mtjdk4tjjkaw4arccah&transactions%5B0%5D%5Btype%5D=update&transactions%5B1%5D%5Bvalue%5D=For+default+branch&transactions%5B1%5D%5Btype%5D=comment&transactions%5B2%5D%5Bvalue%5D=create+comment+for+phabricator+test&transactions%5B2%5D%5Btype%5D=title&api.token=cli-hahayouwish",
+ "headers": {
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-length": [
+ "332"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0.1+253-f2ebe61e9a8e+20190607)"
+ ]
+ }
+ }
+ },
+ {
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"1253\",\"phid\":\"PHID-DREV-4rhqd6v3yxbtodc7wbv7\",\"title\":\"create comment for phabricator test\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D1253\",\"dateCreated\":\"1559938988\",\"dateModified\":\"1559938988\",\"authorPHID\":\"PHID-USER-qmzis76vb2yh3ogldu6r\",\"status\":\"0\",\"statusName\":\"Draft\",\"properties\":{\"draft.broadcast\":false,\"lines.added\":1,\"lines.removed\":0},\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"1\",\"activeDiffPHID\":\"PHID-DIFF-3mtjdk4tjjkaw4arccah\",\"diffs\":[\"1989\"],\"commits\":[],\"reviewers\":[],\"ccs\":[],\"hashes\":[],\"auxiliary\":{\"bugzilla.bug-id\":null,\"phabricator:projects\":[\"PHID-PROJ-f2a3wl5wxtqdtfgdjqzk\"],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "date": [
+ "Fri, 07 Jun 2019 20:23:09 GMT"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "content-length": [
+ "773"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ]
+ }
+ },
+ "request": {
+ "method": "POST",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "body": "api.token=cli-hahayouwish&ids%5B0%5D=1253",
+ "headers": {
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-length": [
+ "58"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0.1+253-f2ebe61e9a8e+20190607)"
+ ]
+ }
+ }
+ },
+ {
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "date": [
+ "Fri, 07 Jun 2019 20:23:10 GMT"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "content-length": [
+ "51"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ]
+ }
+ },
+ "request": {
+ "method": "POST",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "body": "api.token=cli-hahayouwish&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%2281fce7de1b7d8ea6b8309a58058d3b5793506c34%22%2C+%22parent%22%3A+%22a19f1434f9a578325eb9799c9961b5465d4e6e40%22%2C+%22user%22%3A+%22test%22%7D&name=hg%3Ameta&diff_id=1989",
+ "headers": {
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-length": [
+ "296"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0.1+253-f2ebe61e9a8e+20190607)"
+ ]
+ }
+ }
+ },
+ {
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "date": [
+ "Fri, 07 Jun 2019 20:23:10 GMT"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "content-length": [
+ "51"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ]
+ }
+ },
+ "request": {
+ "method": "POST",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "body": "api.token=cli-hahayouwish&data=%7B%2281fce7de1b7d8ea6b8309a58058d3b5793506c34%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%2281fce7de1b7d8ea6b8309a58058d3b5793506c34%22%2C+%22parents%22%3A+%5B%22a19f1434f9a578325eb9799c9961b5465d4e6e40%22%5D%2C+%22time%22%3A+0%7D%7D&name=local%3Acommits&diff_id=1989",
+ "headers": {
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-length": [
+ "396"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0.1+253-f2ebe61e9a8e+20190607)"
+ ]
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabsend-comment-updated.json Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,581 @@
+{
+ "interactions": [
+ {
+ "request": {
+ "method": "POST",
+ "body": "api.token=cli-hahayouwish&revisionIDs%5B0%5D=1253",
+ "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs",
+ "headers": {
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0.1+253-f2ebe61e9a8e+20190607)"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-length": [
+ "66"
+ ]
+ }
+ },
+ "response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"1989\":{\"id\":\"1989\",\"revisionID\":\"1253\",\"dateCreated\":\"1559938985\",\"dateModified\":\"1559938988\",\"sourceControlBaseRevision\":null,\"sourceControlPath\":null,\"sourceControlSystem\":null,\"branch\":null,\"bookmark\":null,\"creationMethod\":\"web\",\"description\":null,\"unitStatus\":\"4\",\"lintStatus\":\"4\",\"changes\":[{\"id\":\"5273\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"mzg_LBhhVYqb\"},\"oldPath\":null,\"currentPath\":\"comment\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+comment\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 0\",\"node\":\"0025df7d064f9c916862d19e207429a0f799fa7d\",\"parent\":\"a19f1434f9a578325eb9799c9961b5465d4e6e40\",\"user\":\"test\"},\"local:commits\":{\"0025df7d064f9c916862d19e207429a0f799fa7d\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"0025df7d064f9c916862d19e207429a0f799fa7d\",\"parents\":[\"a19f1434f9a578325eb9799c9961b5465d4e6e40\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "date": [
+ "Fri, 07 Jun 2019 20:26:57 GMT"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "content-length": [
+ "1243"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ]
+ }
+ }
+ },
+ {
+ "request": {
+ "method": "POST",
+ "body": "constraints%5Bcallsigns%5D%5B0%5D=HG&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+ "headers": {
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0.1+253-f2ebe61e9a8e+20190607)"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-length": [
+ "81"
+ ]
+ }
+ },
+ "response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"data\":[{\"id\":12,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":\"Mercurial\",\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"refRules\":{\"fetchRules\":[],\"trackRules\":[],\"permanentRefRules\":[]},\"spacePHID\":null,\"dateCreated\":1523292927,\"dateModified\":1523297359,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "date": [
+ "Fri, 07 Jun 2019 20:26:58 GMT"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "content-length": [
+ "587"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ]
+ }
+ }
+ },
+ {
+ "request": {
+ "method": "POST",
+ "body": "repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3&api.token=cli-hahayouwish&diff=diff+--git+a%2Fcomment+b%2Fcomment%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Fcomment%0A%40%40+-0%2C0+%2B1%2C2+%40%40%0A%2Bcomment%0A%2Bcomment2%0A",
+ "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
+ "headers": {
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0.1+253-f2ebe61e9a8e+20190607)"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-length": [
+ "257"
+ ]
+ }
+ },
+ "response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"id\":1990,\"phid\":\"PHID-DIFF-xfa4yzc5h2cvjfhpx4dv\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/1990\\/\"},\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "date": [
+ "Fri, 07 Jun 2019 20:26:59 GMT"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "content-length": [
+ "172"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ]
+ }
+ }
+ },
+ {
+ "request": {
+ "method": "POST",
+ "body": "diff_id=1990&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%221acd4b60af38c934182468719a8a431248f49bef%22%2C+%22parent%22%3A+%22a19f1434f9a578325eb9799c9961b5465d4e6e40%22%2C+%22user%22%3A+%22test%22%7D&api.token=cli-hahayouwish&name=hg%3Ameta",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "headers": {
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0.1+253-f2ebe61e9a8e+20190607)"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-length": [
+ "296"
+ ]
+ }
+ },
+ "response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "date": [
+ "Fri, 07 Jun 2019 20:26:59 GMT"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "content-length": [
+ "51"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ]
+ }
+ }
+ },
+ {
+ "request": {
+ "method": "POST",
+ "body": "diff_id=1990&data=%7B%221acd4b60af38c934182468719a8a431248f49bef%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%221acd4b60af38c934182468719a8a431248f49bef%22%2C+%22parents%22%3A+%5B%22a19f1434f9a578325eb9799c9961b5465d4e6e40%22%5D%2C+%22time%22%3A+0%7D%7D&api.token=cli-hahayouwish&name=local%3Acommits",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "headers": {
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0.1+253-f2ebe61e9a8e+20190607)"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-length": [
+ "396"
+ ]
+ }
+ },
+ "response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "date": [
+ "Fri, 07 Jun 2019 20:27:00 GMT"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "content-length": [
+ "51"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ]
+ }
+ }
+ },
+ {
+ "request": {
+ "method": "POST",
+ "body": "api.token=cli-hahayouwish&corpus=create+comment+for+phabricator+test%0A%0ADifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD1253",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "headers": {
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0.1+253-f2ebe61e9a8e+20190607)"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-length": [
+ "165"
+ ]
+ }
+ },
+ "response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create comment for phabricator test\",\"revisionID\":1253},\"revisionIDFieldInfo\":{\"value\":1253,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"create comment for phabricator test\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "date": [
+ "Fri, 07 Jun 2019 20:27:01 GMT"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "content-length": [
+ "306"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ]
+ }
+ }
+ },
+ {
+ "request": {
+ "method": "POST",
+ "body": "api.token=cli-hahayouwish&transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-xfa4yzc5h2cvjfhpx4dv&transactions%5B1%5D%5Btype%5D=comment&transactions%5B1%5D%5Bvalue%5D=Address+review+comments&transactions%5B2%5D%5Btype%5D=title&transactions%5B2%5D%5Bvalue%5D=create+comment+for+phabricator+test&objectIdentifier=1253",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "headers": {
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0.1+253-f2ebe61e9a8e+20190607)"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-length": [
+ "359"
+ ]
+ }
+ },
+ "response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":1253,\"phid\":\"PHID-DREV-4rhqd6v3yxbtodc7wbv7\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-punz3dredrxghth\"},{\"phid\":\"PHID-XACT-DREV-ykwxppmzdgrtgye\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "date": [
+ "Fri, 07 Jun 2019 20:27:02 GMT"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "content-length": [
+ "210"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ]
+ }
+ }
+ },
+ {
+ "request": {
+ "method": "POST",
+ "body": "api.token=cli-hahayouwish&ids%5B0%5D=1253",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "headers": {
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0.1+253-f2ebe61e9a8e+20190607)"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-length": [
+ "58"
+ ]
+ }
+ },
+ "response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"1253\",\"phid\":\"PHID-DREV-4rhqd6v3yxbtodc7wbv7\",\"title\":\"create comment for phabricator test\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D1253\",\"dateCreated\":\"1559938988\",\"dateModified\":\"1559939221\",\"authorPHID\":\"PHID-USER-qmzis76vb2yh3ogldu6r\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":2,\"lines.removed\":0,\"buildables\":{\"PHID-HMBB-hsvjwe4uccbkgjpvffhz\":{\"status\":\"passed\"}}},\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-xfa4yzc5h2cvjfhpx4dv\",\"diffs\":[\"1990\",\"1989\"],\"commits\":[],\"reviewers\":[],\"ccs\":[],\"hashes\":[],\"auxiliary\":{\"bugzilla.bug-id\":null,\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "date": [
+ "Fri, 07 Jun 2019 20:27:02 GMT"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "referrer-policy": [
+ "no-referrer",
+ "strict-origin-when-cross-origin"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "content-length": [
+ "822"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ]
+ }
+ }
+ }
+ ],
+ "version": 1
+}
\ No newline at end of file
--- a/tests/phabricator/phabsend-create-alpha.json Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/phabricator/phabsend-create-alpha.json Mon Jul 22 14:00:33 2019 -0400
@@ -1,617 +1,617 @@
{
+ "version": 1,
"interactions": [
{
"request": {
- "method": "POST",
- "body": "constraints%5Bcallsigns%5D%5B0%5D=HG&api.token=cli-hahayouwish",
- "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "93"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "79"
]
- }
+ },
+ "body": "api.token=cli-hahayouwish&constraints%5Bcallsigns%5D%5B0%5D=HG",
+ "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:23 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:00 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "549"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2Fpywot5xerq4gs2tjxw3gnadzdg6vomqmfcnwqddp; expires=Fri, 01-Mar-2024 00:12:23 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":{\"data\":[{\"id\":10,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":\"Mercurial\",\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"spacePHID\":null,\"dateCreated\":1507817156,\"dateModified\":1529613276,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3&api.token=cli-hahayouwish&diff=diff+--git+a%2Falpha+b%2Falpha%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Falpha%0A%40%40+-0%2C0+%2B1%2C1+%40%40%0A%2Balpha%0A",
- "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "235"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "235"
]
- }
+ },
+ "body": "api.token=cli-hahayouwish&diff=diff+--git+a%2Falpha+b%2Falpha%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Falpha%0A%40%40+-0%2C0+%2B1%2C1+%40%40%0A%2Balpha%0A&repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3",
+ "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":{\"id\":14303,\"phid\":\"PHID-DIFF-allzuauvigfjpv4z6dpi\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/14303\\/\"},\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:24 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:01 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "172"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2F2n2dlkkwzljrpzfghpdsflbt4ftnrwcc446dzcy5; expires=Fri, 01-Mar-2024 00:12:24 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":{\"id\":1899,\"phid\":\"PHID-DIFF-gpg57jico75ouhl2bux2\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/1899\\/\"},\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "diff_id=14303&data=%7B%22user%22%3A+%22test%22%2C+%22parent%22%3A+%220000000000000000000000000000000000000000%22%2C+%22node%22%3A+%22d386117f30e6b1282897bdbde75ac21e095163d4%22%2C+%22date%22%3A+%220+0%22%7D&api.token=cli-hahayouwish&name=hg%3Ameta",
- "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "296"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "264"
]
- }
+ },
+ "body": "diff_id=1899&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%22d386117f30e6b1282897bdbde75ac21e095163d4%22%2C+%22parent%22%3A+%220000000000000000000000000000000000000000%22%2C+%22user%22%3A+%22test%22%7D&name=hg%3Ameta&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:25 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:02 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "51"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2F5mq3t25wu5igv7oufpwcoy32fveozo7wn5wni3gw; expires=Fri, 01-Mar-2024 00:12:25 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "diff_id=14303&data=%7B%22d386117f30e6b1282897bdbde75ac21e095163d4%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&api.token=cli-hahayouwish&name=local%3Acommits",
- "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "257"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "227"
]
- }
+ },
+ "body": "diff_id=1899&data=%7B%22d386117f30e6b1282897bdbde75ac21e095163d4%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%22d386117f30e6b1282897bdbde75ac21e095163d4%22%2C+%22parents%22%3A+%5B%220000000000000000000000000000000000000000%22%5D%2C+%22time%22%3A+0%7D%7D&name=local%3Acommits&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:25 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:02 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "51"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2F5nja6g4cnpt63ctjjwykxyceyb7kokfptrzbejoc; expires=Fri, 01-Mar-2024 00:12:25 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "api.token=cli-hahayouwish&corpus=create+alpha+for+phabricator+test+%E2%82%AC",
- "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "93"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "93"
]
- }
+ },
+ "body": "api.token=cli-hahayouwish&corpus=create+alpha+for+phabricator+test+%E2%82%AC",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create alpha for phabricator test \\u20ac\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"}},\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:26 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:03 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "298"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2Fkrxawhyvcd4jhv77inuwdmzcci4f7kql6c7l3smz; expires=Fri, 01-Mar-2024 00:12:26 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create alpha for phabricator test \\u20ac\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"create alpha for phabricator test \\u20ac\"}]},\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-allzuauvigfjpv4z6dpi&transactions%5B1%5D%5Btype%5D=title&transactions%5B1%5D%5Bvalue%5D=create+alpha+for+phabricator+test+%E2%82%AC&api.token=cli-hahayouwish",
- "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "252"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "252"
]
- }
+ },
+ "body": "api.token=cli-hahayouwish&transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-gpg57jico75ouhl2bux2&transactions%5B1%5D%5Btype%5D=title&transactions%5B1%5D%5Bvalue%5D=create+alpha+for+phabricator+test+%E2%82%AC",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":{\"object\":{\"id\":6054,\"phid\":\"PHID-DREV-6pczsbtdpqjc2nskmxwy\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-efgl4j4fesixjog\"},{\"phid\":\"PHID-XACT-DREV-xj7ksjeyfadwf5m\"},{\"phid\":\"PHID-XACT-DREV-gecx5zw42kkuffc\"},{\"phid\":\"PHID-XACT-DREV-asda7zcwgzdadoi\"},{\"phid\":\"PHID-XACT-DREV-ku26t33y6iiugjw\"}]},\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:27 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:04 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "294"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2Fjwgcqb5hvbltjq4jqbpauz7rmmhpuh2rb7phsdmf; expires=Fri, 01-Mar-2024 00:12:27 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":1190,\"phid\":\"PHID-DREV-kikesmfxhzpfaxbzgj3l\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-od4nnssrqj57m6x\"},{\"phid\":\"PHID-XACT-DREV-2prb5lagzng6uqt\"},{\"phid\":\"PHID-XACT-DREV-qu7o6fgwssovbwb\"},{\"phid\":\"PHID-XACT-DREV-uynfy6n3u6new5f\"}]},\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "api.token=cli-hahayouwish&ids%5B0%5D=6054",
- "uri": "https://phab.mercurial-scm.org//api/differential.query",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "58"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "58"
]
- }
+ },
+ "body": "ids%5B0%5D=1190&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":[{\"id\":\"6054\",\"phid\":\"PHID-DREV-6pczsbtdpqjc2nskmxwy\",\"title\":\"create alpha for phabricator test \\u20ac\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D6054\",\"dateCreated\":\"1551571947\",\"dateModified\":\"1551571947\",\"authorPHID\":\"PHID-USER-5iy6mkoveguhm2zthvww\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":[],\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"1\",\"activeDiffPHID\":\"PHID-DIFF-allzuauvigfjpv4z6dpi\",\"diffs\":[\"14303\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:28 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:05 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "778"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2F3lgkbbyaa646ng5klghjyehsbjxtaqblipnvocuz; expires=Fri, 01-Mar-2024 00:12:28 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"1190\",\"phid\":\"PHID-DREV-kikesmfxhzpfaxbzgj3l\",\"title\":\"create alpha for phabricator test \\u20ac\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D1190\",\"dateCreated\":\"1557063064\",\"dateModified\":\"1557063064\",\"authorPHID\":\"PHID-USER-qmzis76vb2yh3ogldu6r\",\"status\":\"0\",\"statusName\":\"Draft\",\"properties\":{\"draft.broadcast\":false,\"lines.added\":1,\"lines.removed\":0},\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"1\",\"activeDiffPHID\":\"PHID-DIFF-gpg57jico75ouhl2bux2\",\"diffs\":[\"1899\"],\"commits\":[],\"reviewers\":[],\"ccs\":[],\"hashes\":[],\"auxiliary\":{\"bugzilla.bug-id\":null,\"phabricator:projects\":[\"PHID-PROJ-f2a3wl5wxtqdtfgdjqzk\"],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "diff_id=14303&data=%7B%22user%22%3A+%22test%22%2C+%22parent%22%3A+%220000000000000000000000000000000000000000%22%2C+%22node%22%3A+%22cb03845d6dd98c72bec766c7ed08c693cc49817a%22%2C+%22date%22%3A+%220+0%22%7D&api.token=cli-hahayouwish&name=hg%3Ameta",
- "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "296"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "264"
]
- }
+ },
+ "body": "diff_id=1899&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%22a86ed7d85e866f01161e9f55cee5d116272f508f%22%2C+%22parent%22%3A+%220000000000000000000000000000000000000000%22%2C+%22user%22%3A+%22test%22%7D&name=hg%3Ameta&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:28 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:06 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "51"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2Fwjxvlsjqmqwvcljfv6oe2sbometi3gebps6vzrlw; expires=Fri, 01-Mar-2024 00:12:28 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "diff_id=14303&data=%7B%22cb03845d6dd98c72bec766c7ed08c693cc49817a%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&api.token=cli-hahayouwish&name=local%3Acommits",
- "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "257"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "227"
]
- }
+ },
+ "body": "diff_id=1899&data=%7B%22a86ed7d85e866f01161e9f55cee5d116272f508f%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%22a86ed7d85e866f01161e9f55cee5d116272f508f%22%2C+%22parents%22%3A+%5B%220000000000000000000000000000000000000000%22%5D%2C+%22time%22%3A+0%7D%7D&name=local%3Acommits&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:29 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:06 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "51"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2Foeyncgzaanzmnhgfc7ecvmu5pq7qju7ewq6tvgrp; expires=Fri, 01-Mar-2024 00:12:29 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
}
}
}
- ],
- "version": 1
+ ]
}
--- a/tests/phabricator/phabsend-create-public.json Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/phabricator/phabsend-create-public.json Mon Jul 22 14:00:33 2019 -0400
@@ -1,957 +1,957 @@
{
+ "version": 1,
"interactions": [
{
- "response": {
+ "request": {
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "93"
+ ],
"content-type": [
- "application/json"
- ],
- "date": [
- "Thu, 10 Jan 2019 04:08:24 GMT"
- ],
- "x-content-type-options": [
- "nosniff"
- ],
- "cache-control": [
- "no-store"
- ],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "x-xss-protection": [
- "1; mode=block"
- ],
- "x-frame-options": [
- "Deny"
- ],
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
- "set-cookie": [
- "phsid=A%2F5faozuxaekgxbyfcc43jvrcmbr5fscbki46mvcvl; expires=Tue, 09-Jan-2024 04:08:24 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "transfer-encoding": [
- "chunked"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
- ]
- },
- "status": {
- "message": "OK",
- "code": 200
- },
- "body": {
- "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
- }
- },
- "request": {
- "method": "POST",
- "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
- "headers": {
- "content-length": [
- "79"
- ],
+ "application/x-www-form-urlencoded"
+ ],
"accept": [
"application/mercurial-0.1"
- ],
- "content-type": [
- "application/x-www-form-urlencoded"
- ],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
- ],
+ ],
"host": [
"phab.mercurial-scm.org"
]
- },
- "body": "constraints%5Bcallsigns%5D%5B0%5D=HG&api.token=cli-hahayouwish"
- }
- },
- {
+ },
+ "body": "api.token=cli-hahayouwish&constraints%5Bcallsigns%5D%5B0%5D=HG",
+ "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+ "method": "POST"
+ },
"response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
"headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
"content-type": [
"application/json"
- ],
+ ],
"date": [
- "Thu, 10 Jan 2019 04:08:25 GMT"
- ],
+ "Sun, 05 May 2019 13:31:20 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "549"
+ ],
"x-content-type-options": [
"nosniff"
- ],
- "cache-control": [
- "no-store"
- ],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "x-xss-protection": [
- "1; mode=block"
- ],
- "x-frame-options": [
- "Deny"
- ],
+ ],
"expires": [
"Sat, 01 Jan 2000 00:00:00 GMT"
- ],
- "set-cookie": [
- "phsid=A%2Fkb72422mbpyuyoultl4hkizat6qscjgrl5hi6k2n; expires=Tue, 09-Jan-2024 04:08:25 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "transfer-encoding": [
- "chunked"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
]
- },
- "status": {
- "message": "OK",
- "code": 200
- },
+ },
"body": {
- "string": "{\"result\":{\"id\":13121,\"phid\":\"PHID-DIFF-xrku5f3mlveqr3hhj6a7\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/13121\\/\"},\"error_code\":null,\"error_info\":null}"
+ "string": "{\"result\":{\"data\":[{\"id\":10,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":\"Mercurial\",\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"spacePHID\":null,\"dateCreated\":1507817156,\"dateModified\":1529613276,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
}
- },
+ }
+ },
+ {
"request": {
- "method": "POST",
- "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
"content-length": [
"220"
- ],
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
"accept": [
"application/mercurial-0.1"
- ],
- "content-type": [
- "application/x-www-form-urlencoded"
- ],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
- ],
+ ],
"host": [
"phab.mercurial-scm.org"
]
- },
- "body": "repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3&diff=diff+--git+a%2Fbeta+b%2Fbeta%0A---+a%2Fbeta%0A%2B%2B%2B+b%2Fbeta%0A%40%40+-1%2C1+%2B1%2C1+%40%40%0A-beta%0A%2Bpublic+change%0A&api.token=cli-hahayouwish"
- }
- },
- {
+ },
+ "body": "api.token=cli-hahayouwish&diff=diff+--git+a%2Fbeta+b%2Fbeta%0A---+a%2Fbeta%0A%2B%2B%2B+b%2Fbeta%0A%40%40+-1%2C1+%2B1%2C1+%40%40%0A-beta%0A%2Bpublic+change%0A&repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3",
+ "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
+ "method": "POST"
+ },
"response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
"headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
"content-type": [
"application/json"
- ],
+ ],
"date": [
- "Thu, 10 Jan 2019 04:08:25 GMT"
- ],
+ "Sun, 05 May 2019 13:31:21 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "172"
+ ],
"x-content-type-options": [
"nosniff"
- ],
- "cache-control": [
- "no-store"
- ],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "x-xss-protection": [
- "1; mode=block"
- ],
- "x-frame-options": [
- "Deny"
- ],
+ ],
"expires": [
"Sat, 01 Jan 2000 00:00:00 GMT"
- ],
- "set-cookie": [
- "phsid=A%2Fpyr677mjsjvlsn3wwzl2iignpppablawwz7dn5ap; expires=Tue, 09-Jan-2024 04:08:25 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "transfer-encoding": [
- "chunked"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
]
- },
- "status": {
- "message": "OK",
- "code": 200
- },
+ },
"body": {
- "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ "string": "{\"result\":{\"id\":1902,\"phid\":\"PHID-DIFF-uuzq4s7s72y4ts7ijduc\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/1902\\/\"},\"error_code\":null,\"error_info\":null}"
}
- },
+ }
+ },
+ {
"request": {
- "method": "POST",
- "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
"content-length": [
- "264"
- ],
+ "296"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
"accept": [
"application/mercurial-0.1"
- ],
- "content-type": [
- "application/x-www-form-urlencoded"
- ],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
- ],
+ ],
"host": [
"phab.mercurial-scm.org"
]
- },
- "body": "name=hg%3Ameta&api.token=cli-hahayouwish&data=%7B%22date%22%3A+%220+0%22%2C+%22user%22%3A+%22test%22%2C+%22node%22%3A+%22540a21d3fbeb7c56cafe726bba6cd9fdcc94f29c%22%2C+%22parent%22%3A+%22c2b605ada280b38c38031b5d31622869c72b0d8d%22%7D&diff_id=13121"
- }
- },
- {
+ },
+ "body": "diff_id=1902&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%2224ffd6bca53a1e05369ed5b8834587c2b2b364da%22%2C+%22parent%22%3A+%222837deb84f4ab1315c1197b8aef10c620465e352%22%2C+%22user%22%3A+%22test%22%7D&name=hg%3Ameta&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
"response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
"headers": {
- "content-type": [
- "application/json"
- ],
- "date": [
- "Thu, 10 Jan 2019 04:08:26 GMT"
- ],
- "x-content-type-options": [
- "nosniff"
- ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
"cache-control": [
"no-store"
- ],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "x-xss-protection": [
- "1; mode=block"
- ],
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "date": [
+ "Sun, 05 May 2019 13:31:22 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
"x-frame-options": [
"Deny"
- ],
+ ],
+ "content-length": [
+ "51"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
"expires": [
"Sat, 01 Jan 2000 00:00:00 GMT"
- ],
- "set-cookie": [
- "phsid=A%2Fegvbvujn6hykhurzyjtaq4xduxl6sz7gavenbcou; expires=Tue, 09-Jan-2024 04:08:26 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "transfer-encoding": [
- "chunked"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
]
- },
- "status": {
- "message": "OK",
- "code": 200
- },
+ },
"body": {
"string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
}
- },
+ }
+ },
+ {
"request": {
- "method": "POST",
- "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
"content-length": [
- "227"
- ],
+ "257"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
"accept": [
"application/mercurial-0.1"
- ],
- "content-type": [
- "application/x-www-form-urlencoded"
- ],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
- ],
+ ],
"host": [
"phab.mercurial-scm.org"
]
- },
- "body": "name=local%3Acommits&api.token=cli-hahayouwish&data=%7B%22540a21d3fbeb7c56cafe726bba6cd9fdcc94f29c%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&diff_id=13121"
- }
- },
- {
+ },
+ "body": "diff_id=1902&data=%7B%2224ffd6bca53a1e05369ed5b8834587c2b2b364da%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%2224ffd6bca53a1e05369ed5b8834587c2b2b364da%22%2C+%22parents%22%3A+%5B%222837deb84f4ab1315c1197b8aef10c620465e352%22%5D%2C+%22time%22%3A+0%7D%7D&name=local%3Acommits&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
"response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
"headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
"content-type": [
"application/json"
- ],
+ ],
"date": [
- "Thu, 10 Jan 2019 04:08:26 GMT"
- ],
+ "Sun, 05 May 2019 13:31:23 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "51"
+ ],
"x-content-type-options": [
"nosniff"
- ],
- "cache-control": [
- "no-store"
- ],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "x-xss-protection": [
- "1; mode=block"
- ],
- "x-frame-options": [
- "Deny"
- ],
+ ],
"expires": [
"Sat, 01 Jan 2000 00:00:00 GMT"
- ],
- "set-cookie": [
- "phsid=A%2Flbjzqvie4g24kmhnqws2bwhmeiijd3qvvkd22isg; expires=Tue, 09-Jan-2024 04:08:27 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "transfer-encoding": [
- "chunked"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
]
- },
- "status": {
- "message": "OK",
- "code": 200
- },
+ },
"body": {
- "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create public change for phabricator testing\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"}},\"error_code\":null,\"error_info\":null}"
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
}
- },
+ }
+ },
+ {
"request": {
- "method": "POST",
- "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
"content-length": [
"94"
- ],
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
"accept": [
"application/mercurial-0.1"
- ],
- "content-type": [
- "application/x-www-form-urlencoded"
- ],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
- ],
+ ],
"host": [
"phab.mercurial-scm.org"
]
- },
- "body": "corpus=create+public+change+for+phabricator+testing&api.token=cli-hahayouwish"
- }
- },
- {
+ },
+ "body": "api.token=cli-hahayouwish&corpus=create+public+change+for+phabricator+testing",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
"response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
"headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
"content-type": [
"application/json"
- ],
+ ],
"date": [
- "Thu, 10 Jan 2019 04:08:27 GMT"
- ],
+ "Sun, 05 May 2019 13:31:23 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "306"
+ ],
"x-content-type-options": [
"nosniff"
- ],
- "cache-control": [
- "no-store"
- ],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "x-xss-protection": [
- "1; mode=block"
- ],
- "x-frame-options": [
- "Deny"
- ],
+ ],
"expires": [
"Sat, 01 Jan 2000 00:00:00 GMT"
- ],
- "set-cookie": [
- "phsid=A%2Fkclyjmm2warvrxwksppx3qxupj4f72ejvxuavrn5; expires=Tue, 09-Jan-2024 04:08:27 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "transfer-encoding": [
- "chunked"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
]
- },
- "status": {
- "message": "OK",
- "code": 200
- },
+ },
"body": {
- "string": "{\"result\":{\"object\":{\"id\":5544,\"phid\":\"PHID-DREV-bwugldlyieuwzrk76xzy\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-wojlvnhodzdoqh6\"},{\"phid\":\"PHID-XACT-DREV-ju3bw7rltmmwpbf\"},{\"phid\":\"PHID-XACT-DREV-2hwwi7dagftdp6q\"},{\"phid\":\"PHID-XACT-DREV-zfsyu5o7wkqzh6s\"},{\"phid\":\"PHID-XACT-DREV-srrkwmheqn6gssk\"}]},\"error_code\":null,\"error_info\":null}"
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create public change for phabricator testing\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"create public change for phabricator testing\"}]},\"error_code\":null,\"error_info\":null}"
}
- },
+ }
+ },
+ {
"request": {
- "method": "POST",
- "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
"content-length": [
"253"
- ],
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
"accept": [
"application/mercurial-0.1"
- ],
- "content-type": [
- "application/x-www-form-urlencoded"
- ],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
- ],
+ ],
"host": [
"phab.mercurial-scm.org"
]
- },
- "body": "transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-xrku5f3mlveqr3hhj6a7&transactions%5B1%5D%5Btype%5D=title&transactions%5B1%5D%5Bvalue%5D=create+public+change+for+phabricator+testing&api.token=cli-hahayouwish"
- }
- },
- {
+ },
+ "body": "api.token=cli-hahayouwish&transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-uuzq4s7s72y4ts7ijduc&transactions%5B1%5D%5Btype%5D=title&transactions%5B1%5D%5Bvalue%5D=create+public+change+for+phabricator+testing",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
+ },
"response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
"headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
"content-type": [
"application/json"
- ],
+ ],
"date": [
- "Thu, 10 Jan 2019 04:08:28 GMT"
- ],
+ "Sun, 05 May 2019 13:31:24 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "294"
+ ],
"x-content-type-options": [
"nosniff"
- ],
- "cache-control": [
- "no-store"
- ],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "x-xss-protection": [
- "1; mode=block"
- ],
- "x-frame-options": [
- "Deny"
- ],
+ ],
"expires": [
"Sat, 01 Jan 2000 00:00:00 GMT"
- ],
- "set-cookie": [
- "phsid=A%2Fbw4ordbzl7d4hcgyyxnoawhrfhycrvvkk6arnz5p; expires=Tue, 09-Jan-2024 04:08:28 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "transfer-encoding": [
- "chunked"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
]
- },
- "status": {
- "message": "OK",
- "code": 200
- },
+ },
"body": {
- "string": "{\"result\":{\"id\":13122,\"phid\":\"PHID-DIFF-iksauhhfhmxfjijyqxji\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/13122\\/\"},\"error_code\":null,\"error_info\":null}"
+ "string": "{\"result\":{\"object\":{\"id\":1192,\"phid\":\"PHID-DREV-qb4xy3abx7eu4puizvjl\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-n2zlzs5qmdlvfbx\"},{\"phid\":\"PHID-XACT-DREV-dwojtdj2d3geffe\"},{\"phid\":\"PHID-XACT-DREV-gr4vgeynol22tgf\"},{\"phid\":\"PHID-XACT-DREV-aighrcyai72tgzv\"}]},\"error_code\":null,\"error_info\":null}"
}
- },
+ }
+ },
+ {
"request": {
- "method": "POST",
- "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
"content-length": [
"232"
- ],
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
"accept": [
"application/mercurial-0.1"
- ],
- "content-type": [
- "application/x-www-form-urlencoded"
- ],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
- ],
+ ],
"host": [
"phab.mercurial-scm.org"
]
- },
- "body": "repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3&diff=diff+--git+a%2Falpha+b%2Falpha%0A---+a%2Falpha%0A%2B%2B%2B+b%2Falpha%0A%40%40+-1%2C2+%2B1%2C1+%40%40%0A-alpha%0A-more%0A%2Bdraft+change%0A&api.token=cli-hahayouwish"
- }
- },
- {
+ },
+ "body": "api.token=cli-hahayouwish&diff=diff+--git+a%2Falpha+b%2Falpha%0A---+a%2Falpha%0A%2B%2B%2B+b%2Falpha%0A%40%40+-1%2C2+%2B1%2C1+%40%40%0A-alpha%0A-more%0A%2Bdraft+change%0A&repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3",
+ "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
+ "method": "POST"
+ },
"response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
"headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
"content-type": [
"application/json"
- ],
+ ],
"date": [
- "Thu, 10 Jan 2019 04:08:29 GMT"
- ],
+ "Sun, 05 May 2019 13:31:25 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "172"
+ ],
"x-content-type-options": [
"nosniff"
- ],
- "cache-control": [
- "no-store"
- ],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "x-xss-protection": [
- "1; mode=block"
- ],
- "x-frame-options": [
- "Deny"
- ],
+ ],
"expires": [
"Sat, 01 Jan 2000 00:00:00 GMT"
- ],
- "set-cookie": [
- "phsid=A%2Fgt3wmrrlkmpdhyaj5rsesxcwbabhpjlhoa6matcg; expires=Tue, 09-Jan-2024 04:08:29 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "transfer-encoding": [
- "chunked"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
]
- },
- "status": {
- "message": "OK",
- "code": 200
- },
+ },
"body": {
- "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ "string": "{\"result\":{\"id\":1903,\"phid\":\"PHID-DIFF-4pugk2zedyh2xm27uuvh\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/1903\\/\"},\"error_code\":null,\"error_info\":null}"
}
- },
+ }
+ },
+ {
"request": {
- "method": "POST",
- "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
"content-length": [
- "264"
- ],
+ "296"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
"accept": [
"application/mercurial-0.1"
- ],
- "content-type": [
- "application/x-www-form-urlencoded"
- ],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
- ],
+ ],
"host": [
"phab.mercurial-scm.org"
]
- },
- "body": "name=hg%3Ameta&api.token=cli-hahayouwish&data=%7B%22date%22%3A+%220+0%22%2C+%22user%22%3A+%22test%22%2C+%22node%22%3A+%226bca752686cd24e603094ef55574655c0017723a%22%2C+%22parent%22%3A+%22540a21d3fbeb7c56cafe726bba6cd9fdcc94f29c%22%7D&diff_id=13122"
- }
- },
- {
+ },
+ "body": "diff_id=1903&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%22ac331633be793e0d4159d5525b404a9782f54904%22%2C+%22parent%22%3A+%2224ffd6bca53a1e05369ed5b8834587c2b2b364da%22%2C+%22user%22%3A+%22test%22%7D&name=hg%3Ameta&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
"response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
"headers": {
- "content-type": [
- "application/json"
- ],
- "date": [
- "Thu, 10 Jan 2019 04:08:29 GMT"
- ],
- "x-content-type-options": [
- "nosniff"
- ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
"cache-control": [
"no-store"
- ],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "x-xss-protection": [
- "1; mode=block"
- ],
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "date": [
+ "Sun, 05 May 2019 13:31:26 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
"x-frame-options": [
"Deny"
- ],
+ ],
+ "content-length": [
+ "51"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
"expires": [
"Sat, 01 Jan 2000 00:00:00 GMT"
- ],
- "set-cookie": [
- "phsid=A%2Fntcsqzh6pptdkfnebvmck6l3y3rrwxzotvsq4phl; expires=Tue, 09-Jan-2024 04:08:29 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "transfer-encoding": [
- "chunked"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
]
- },
- "status": {
- "message": "OK",
- "code": 200
- },
+ },
"body": {
"string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
}
- },
+ }
+ },
+ {
"request": {
- "method": "POST",
- "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
"content-length": [
- "227"
- ],
+ "257"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
"accept": [
"application/mercurial-0.1"
- ],
- "content-type": [
- "application/x-www-form-urlencoded"
- ],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
- ],
+ ],
"host": [
"phab.mercurial-scm.org"
]
- },
- "body": "name=local%3Acommits&api.token=cli-hahayouwish&data=%7B%226bca752686cd24e603094ef55574655c0017723a%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&diff_id=13122"
- }
- },
- {
+ },
+ "body": "diff_id=1903&data=%7B%22ac331633be793e0d4159d5525b404a9782f54904%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%22ac331633be793e0d4159d5525b404a9782f54904%22%2C+%22parents%22%3A+%5B%2224ffd6bca53a1e05369ed5b8834587c2b2b364da%22%5D%2C+%22time%22%3A+0%7D%7D&name=local%3Acommits&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
"response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
"headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
"content-type": [
"application/json"
- ],
+ ],
"date": [
- "Thu, 10 Jan 2019 04:08:30 GMT"
- ],
+ "Sun, 05 May 2019 13:31:27 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "51"
+ ],
"x-content-type-options": [
"nosniff"
- ],
- "cache-control": [
- "no-store"
- ],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "x-xss-protection": [
- "1; mode=block"
- ],
- "x-frame-options": [
- "Deny"
- ],
+ ],
"expires": [
"Sat, 01 Jan 2000 00:00:00 GMT"
- ],
- "set-cookie": [
- "phsid=A%2Fgturi5p5fz64q26mztdrzjldzynp62pp7opcxsnm; expires=Tue, 09-Jan-2024 04:08:30 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "transfer-encoding": [
- "chunked"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
]
- },
- "status": {
- "message": "OK",
- "code": 200
- },
+ },
"body": {
- "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create draft change for phabricator testing\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"}},\"error_code\":null,\"error_info\":null}"
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
}
- },
+ }
+ },
+ {
"request": {
- "method": "POST",
- "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
"content-length": [
"93"
- ],
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
"accept": [
"application/mercurial-0.1"
- ],
- "content-type": [
- "application/x-www-form-urlencoded"
- ],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
- ],
+ ],
"host": [
"phab.mercurial-scm.org"
]
- },
- "body": "corpus=create+draft+change+for+phabricator+testing&api.token=cli-hahayouwish"
- }
- },
- {
+ },
+ "body": "api.token=cli-hahayouwish&corpus=create+draft+change+for+phabricator+testing",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
"response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
"headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
"content-type": [
"application/json"
- ],
+ ],
"date": [
- "Thu, 10 Jan 2019 04:08:31 GMT"
- ],
+ "Sun, 05 May 2019 13:31:27 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "304"
+ ],
"x-content-type-options": [
"nosniff"
- ],
- "cache-control": [
- "no-store"
- ],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "x-xss-protection": [
- "1; mode=block"
- ],
- "x-frame-options": [
- "Deny"
- ],
+ ],
"expires": [
"Sat, 01 Jan 2000 00:00:00 GMT"
- ],
- "set-cookie": [
- "phsid=A%2F4vyvyabatbn7y5bhav6nthgdt4mm6oeh6ybvnrl5; expires=Tue, 09-Jan-2024 04:08:31 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "transfer-encoding": [
- "chunked"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
]
- },
- "status": {
- "message": "OK",
- "code": 200
- },
+ },
"body": {
- "string": "{\"result\":{\"object\":{\"id\":5545,\"phid\":\"PHID-DREV-ga6i6vbmatvd2fszrr2o\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-epqu5uekkf4ig67\"},{\"phid\":\"PHID-XACT-DREV-y3t5z573bwbqv7e\"},{\"phid\":\"PHID-XACT-DREV-dmjvlq7wngqgwxv\"},{\"phid\":\"PHID-XACT-DREV-rkm576j6wvji3ye\"},{\"phid\":\"PHID-XACT-DREV-mb7ttr44lno6j2w\"},{\"phid\":\"PHID-XACT-DREV-ma747d2dkzk3eun\"},{\"phid\":\"PHID-XACT-DREV-3u7lqg7mwxrix5w\"},{\"phid\":\"PHID-XACT-DREV-r33n73dqn7doz7b\"}]},\"error_code\":null,\"error_info\":null}"
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create draft change for phabricator testing\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"create draft change for phabricator testing\"}]},\"error_code\":null,\"error_info\":null}"
}
- },
+ }
+ },
+ {
"request": {
- "method": "POST",
- "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
"content-length": [
"409"
- ],
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
"accept": [
"application/mercurial-0.1"
- ],
- "content-type": [
- "application/x-www-form-urlencoded"
- ],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
- ],
+ ],
"host": [
"phab.mercurial-scm.org"
]
- },
- "body": "transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-iksauhhfhmxfjijyqxji&transactions%5B1%5D%5Btype%5D=summary&transactions%5B1%5D%5Bvalue%5D=Depends+on+D5544&transactions%5B2%5D%5Btype%5D=summary&transactions%5B2%5D%5Bvalue%5D=+&transactions%5B3%5D%5Btype%5D=title&transactions%5B3%5D%5Bvalue%5D=create+draft+change+for+phabricator+testing&api.token=cli-hahayouwish"
- }
- },
- {
+ },
+ "body": "api.token=cli-hahayouwish&transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-4pugk2zedyh2xm27uuvh&transactions%5B1%5D%5Btype%5D=parents.set&transactions%5B1%5D%5Bvalue%5D%5B0%5D=PHID-DREV-qb4xy3abx7eu4puizvjl&transactions%5B2%5D%5Btype%5D=title&transactions%5B2%5D%5Bvalue%5D=create+draft+change+for+phabricator+testing",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
+ },
"response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
"headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
"content-type": [
"application/json"
- ],
+ ],
"date": [
- "Thu, 10 Jan 2019 04:08:32 GMT"
- ],
+ "Sun, 05 May 2019 13:31:29 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "420"
+ ],
"x-content-type-options": [
"nosniff"
- ],
- "cache-control": [
- "no-store"
- ],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "x-xss-protection": [
- "1; mode=block"
- ],
- "x-frame-options": [
- "Deny"
- ],
+ ],
"expires": [
"Sat, 01 Jan 2000 00:00:00 GMT"
- ],
- "set-cookie": [
- "phsid=A%2Fvd66cz7uxztfwfapgqrlmfmoj7szo5wvwk7vqc2u; expires=Tue, 09-Jan-2024 04:08:32 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "transfer-encoding": [
- "chunked"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
]
- },
- "status": {
- "message": "OK",
- "code": 200
- },
+ },
"body": {
- "string": "{\"result\":[{\"id\":\"5545\",\"phid\":\"PHID-DREV-ga6i6vbmatvd2fszrr2o\",\"title\":\"create draft change for phabricator testing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D5545\",\"dateCreated\":\"1547093311\",\"dateModified\":\"1547093311\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":[],\"branch\":null,\"summary\":\" \",\"testPlan\":\"\",\"lineCount\":\"3\",\"activeDiffPHID\":\"PHID-DIFF-iksauhhfhmxfjijyqxji\",\"diffs\":[\"13122\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-bwugldlyieuwzrk76xzy\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null},{\"id\":\"5544\",\"phid\":\"PHID-DREV-bwugldlyieuwzrk76xzy\",\"title\":\"create public change for phabricator testing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D5544\",\"dateCreated\":\"1547093307\",\"dateModified\":\"1547093311\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":[],\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-xrku5f3mlveqr3hhj6a7\",\"diffs\":[\"13121\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}"
+ "string": "{\"result\":{\"object\":{\"id\":1193,\"phid\":\"PHID-DREV-shdibf6gnumia7pou4wo\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-5lh4bjyat7sopph\"},{\"phid\":\"PHID-XACT-DREV-ihh5mnfq4lfd7z6\"},{\"phid\":\"PHID-XACT-DREV-jqgmk2a3klvofsk\"},{\"phid\":\"PHID-XACT-DREV-w5t5g4ke6kjynf3\"},{\"phid\":\"PHID-XACT-DREV-ro7ijohdoyaes55\"},{\"phid\":\"PHID-XACT-DREV-4g3uhii5akj24he\"},{\"phid\":\"PHID-XACT-DREV-44imsawbkha5nqw\"}]},\"error_code\":null,\"error_info\":null}"
}
- },
+ }
+ },
+ {
"request": {
- "method": "POST",
- "uri": "https://phab.mercurial-scm.org//api/differential.query",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
"content-length": [
"74"
- ],
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
"accept": [
"application/mercurial-0.1"
- ],
- "content-type": [
- "application/x-www-form-urlencoded"
- ],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
- ],
+ ],
"host": [
"phab.mercurial-scm.org"
]
- },
- "body": "ids%5B0%5D=5544&ids%5B1%5D=5545&api.token=cli-hahayouwish"
- }
- },
- {
+ },
+ "body": "ids%5B0%5D=1192&ids%5B1%5D=1193&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "method": "POST"
+ },
"response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
"headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
"content-type": [
"application/json"
- ],
+ ],
"date": [
- "Thu, 10 Jan 2019 04:08:32 GMT"
- ],
+ "Sun, 05 May 2019 13:31:29 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "1522"
+ ],
"x-content-type-options": [
"nosniff"
- ],
- "cache-control": [
- "no-store"
- ],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "x-xss-protection": [
- "1; mode=block"
- ],
- "x-frame-options": [
- "Deny"
- ],
+ ],
"expires": [
"Sat, 01 Jan 2000 00:00:00 GMT"
- ],
- "set-cookie": [
- "phsid=A%2Fbqbv2blmnjqe3a5qkpewf5wghxqwcuewjbgfrtq7; expires=Tue, 09-Jan-2024 04:08:32 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "transfer-encoding": [
- "chunked"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
]
- },
- "status": {
- "message": "OK",
- "code": 200
- },
+ },
"body": {
- "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ "string": "{\"result\":[{\"id\":\"1193\",\"phid\":\"PHID-DREV-shdibf6gnumia7pou4wo\",\"title\":\"create draft change for phabricator testing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D1193\",\"dateCreated\":\"1557063088\",\"dateModified\":\"1557063088\",\"authorPHID\":\"PHID-USER-qmzis76vb2yh3ogldu6r\",\"status\":\"0\",\"statusName\":\"Draft\",\"properties\":{\"draft.broadcast\":false,\"lines.added\":1,\"lines.removed\":2},\"branch\":null,\"summary\":\" \",\"testPlan\":\"\",\"lineCount\":\"3\",\"activeDiffPHID\":\"PHID-DIFF-4pugk2zedyh2xm27uuvh\",\"diffs\":[\"1903\"],\"commits\":[],\"reviewers\":[],\"ccs\":[],\"hashes\":[],\"auxiliary\":{\"bugzilla.bug-id\":null,\"phabricator:projects\":[\"PHID-PROJ-f2a3wl5wxtqdtfgdjqzk\"],\"phabricator:depends-on\":[\"PHID-DREV-qb4xy3abx7eu4puizvjl\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null},{\"id\":\"1192\",\"phid\":\"PHID-DREV-qb4xy3abx7eu4puizvjl\",\"title\":\"create public change for phabricator testing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D1192\",\"dateCreated\":\"1557063084\",\"dateModified\":\"1557063088\",\"authorPHID\":\"PHID-USER-qmzis76vb2yh3ogldu6r\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-uuzq4s7s72y4ts7ijduc\",\"diffs\":[\"1902\"],\"commits\":[],\"reviewers\":[],\"ccs\":[],\"hashes\":[],\"auxiliary\":{\"bugzilla.bug-id\":null,\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}"
}
- },
+ }
+ },
+ {
"request": {
- "method": "POST",
- "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
"content-length": [
- "264"
- ],
+ "296"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
"accept": [
"application/mercurial-0.1"
- ],
- "content-type": [
- "application/x-www-form-urlencoded"
- ],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
- ],
+ ],
"host": [
"phab.mercurial-scm.org"
]
- },
- "body": "name=hg%3Ameta&api.token=cli-hahayouwish&data=%7B%22date%22%3A+%220+0%22%2C+%22user%22%3A+%22test%22%2C+%22node%22%3A+%22620a50fd6ed958bbee178052de67acc31dcac66e%22%2C+%22parent%22%3A+%22540a21d3fbeb7c56cafe726bba6cd9fdcc94f29c%22%7D&diff_id=13122"
- }
- },
- {
+ },
+ "body": "diff_id=1903&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%22a19f1434f9a578325eb9799c9961b5465d4e6e40%22%2C+%22parent%22%3A+%2224ffd6bca53a1e05369ed5b8834587c2b2b364da%22%2C+%22user%22%3A+%22test%22%7D&name=hg%3Ameta&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
"response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
"headers": {
- "content-type": [
- "application/json"
- ],
- "date": [
- "Thu, 10 Jan 2019 04:08:33 GMT"
- ],
- "x-content-type-options": [
- "nosniff"
- ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
"cache-control": [
"no-store"
- ],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "x-xss-protection": [
- "1; mode=block"
- ],
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "date": [
+ "Sun, 05 May 2019 13:31:30 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
"x-frame-options": [
"Deny"
- ],
+ ],
+ "content-length": [
+ "51"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
"expires": [
"Sat, 01 Jan 2000 00:00:00 GMT"
- ],
- "set-cookie": [
- "phsid=A%2Fic7sfd33zs7c44ojloujnoicm3roxnre45glurgz; expires=Tue, 09-Jan-2024 04:08:33 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "transfer-encoding": [
- "chunked"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
]
- },
- "status": {
- "message": "OK",
- "code": 200
- },
+ },
"body": {
"string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
}
- },
+ }
+ },
+ {
"request": {
- "method": "POST",
- "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
"content-length": [
- "227"
- ],
+ "257"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
"accept": [
"application/mercurial-0.1"
- ],
- "content-type": [
- "application/x-www-form-urlencoded"
- ],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.8.2+682-e2cf04a597cc+20190109)"
- ],
+ ],
"host": [
"phab.mercurial-scm.org"
]
- },
- "body": "name=local%3Acommits&api.token=cli-hahayouwish&data=%7B%22620a50fd6ed958bbee178052de67acc31dcac66e%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&diff_id=13122"
+ },
+ "body": "diff_id=1903&data=%7B%22a19f1434f9a578325eb9799c9961b5465d4e6e40%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%22a19f1434f9a578325eb9799c9961b5465d4e6e40%22%2C+%22parents%22%3A+%5B%2224ffd6bca53a1e05369ed5b8834587c2b2b364da%22%5D%2C+%22time%22%3A+0%7D%7D&name=local%3Acommits&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "date": [
+ "Sun, 05 May 2019 13:31:31 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "51"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
}
}
- ],
- "version": 1
-}
\ No newline at end of file
+ ]
+}
--- a/tests/phabricator/phabsend-update-alpha-create-beta.json Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/phabricator/phabsend-update-alpha-create-beta.json Mon Jul 22 14:00:33 2019 -0400
@@ -1,1025 +1,1025 @@
{
+ "version": 1,
"interactions": [
{
"request": {
- "method": "POST",
- "body": "api.token=cli-hahayouwish&revisionIDs%5B0%5D=6054",
- "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "66"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "66"
]
- }
+ },
+ "body": "api.token=cli-hahayouwish&revisionIDs%5B0%5D=1190",
+ "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":{\"14303\":{\"id\":\"14303\",\"revisionID\":\"6054\",\"dateCreated\":\"1551571944\",\"dateModified\":\"1551571947\",\"sourceControlBaseRevision\":null,\"sourceControlPath\":null,\"sourceControlSystem\":null,\"branch\":null,\"bookmark\":null,\"creationMethod\":\"web\",\"description\":null,\"unitStatus\":\"4\",\"lintStatus\":\"4\",\"changes\":[{\"id\":\"32287\",\"metadata\":{\"line:first\":1},\"oldPath\":null,\"currentPath\":\"alpha\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+alpha\\n\"}]}],\"properties\":{\"hg:meta\":{\"user\":\"test\",\"parent\":\"0000000000000000000000000000000000000000\",\"node\":\"cb03845d6dd98c72bec766c7ed08c693cc49817a\",\"date\":\"0 0\"},\"local:commits\":{\"cb03845d6dd98c72bec766c7ed08c693cc49817a\":{\"author\":\"test\",\"authorEmail\":\"test\",\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:30 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:08 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "1132"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2Fnf3xdxgvvgky277foc7s2p6xrgtsvn4bzmayrbmb; expires=Fri, 01-Mar-2024 00:12:30 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":{\"1899\":{\"id\":\"1899\",\"revisionID\":\"1190\",\"dateCreated\":\"1557063061\",\"dateModified\":\"1557063064\",\"sourceControlBaseRevision\":null,\"sourceControlPath\":null,\"sourceControlSystem\":null,\"branch\":null,\"bookmark\":null,\"creationMethod\":\"web\",\"description\":null,\"unitStatus\":\"4\",\"lintStatus\":\"4\",\"changes\":[{\"id\":\"4355\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"g6dr_XSxA9EP\"},\"oldPath\":null,\"currentPath\":\"alpha\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+alpha\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 0\",\"node\":\"53fe3a1e0f42670a88ad845247b2ed4d5e645434\",\"parent\":\"0000000000000000000000000000000000000000\",\"user\":\"test\"},\"local:commits\":{\"53fe3a1e0f42670a88ad845247b2ed4d5e645434\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "constraints%5Bcallsigns%5D%5B0%5D=HG&api.token=cli-hahayouwish",
- "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "93"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "79"
]
- }
+ },
+ "body": "api.token=cli-hahayouwish&constraints%5Bcallsigns%5D%5B0%5D=HG",
+ "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:31 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:09 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "549"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2Fmlq7cl6pakmia2uecfcevwhdl3hyqe6rdb2y7usm; expires=Fri, 01-Mar-2024 00:12:31 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":{\"data\":[{\"id\":10,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":\"Mercurial\",\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"spacePHID\":null,\"dateCreated\":1507817156,\"dateModified\":1529613276,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3&api.token=cli-hahayouwish&diff=diff+--git+a%2Falpha+b%2Falpha%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Falpha%0A%40%40+-0%2C0+%2B1%2C2+%40%40%0A%2Balpha%0A%2Bmore%0A",
- "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "245"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "245"
]
- }
+ },
+ "body": "api.token=cli-hahayouwish&diff=diff+--git+a%2Falpha+b%2Falpha%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Falpha%0A%40%40+-0%2C0+%2B1%2C2+%40%40%0A%2Balpha%0A%2Bmore%0A&repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3",
+ "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":{\"id\":14304,\"phid\":\"PHID-DIFF-3wv2fwmzp27uamb66xxg\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/14304\\/\"},\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:32 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:09 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "172"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2Fptjtujvqlcwhzs4yhneogb323aqessc5axlu4rif; expires=Fri, 01-Mar-2024 00:12:32 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":{\"id\":1900,\"phid\":\"PHID-DIFF-gra4b3ivsgebktbeoxxx\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/1900\\/\"},\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "diff_id=14304&data=%7B%22user%22%3A+%22test%22%2C+%22parent%22%3A+%220000000000000000000000000000000000000000%22%2C+%22node%22%3A+%22939d862f03181a366fea64a540baf0bb33f85d92%22%2C+%22date%22%3A+%220+0%22%7D&api.token=cli-hahayouwish&name=hg%3Ameta",
- "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "296"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "264"
]
- }
+ },
+ "body": "diff_id=1900&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%22d940d39fb603f29ea5df4b7c15f315fe6ff4e346%22%2C+%22parent%22%3A+%220000000000000000000000000000000000000000%22%2C+%22user%22%3A+%22test%22%7D&name=hg%3Ameta&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:32 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:10 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "51"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2Feho2462w6mulsjeoz3e4rwgf37aekqwgpqmarn2f; expires=Fri, 01-Mar-2024 00:12:32 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "diff_id=14304&data=%7B%22939d862f03181a366fea64a540baf0bb33f85d92%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&api.token=cli-hahayouwish&name=local%3Acommits",
- "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "257"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "227"
]
- }
+ },
+ "body": "diff_id=1900&data=%7B%22d940d39fb603f29ea5df4b7c15f315fe6ff4e346%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%22d940d39fb603f29ea5df4b7c15f315fe6ff4e346%22%2C+%22parents%22%3A+%5B%220000000000000000000000000000000000000000%22%5D%2C+%22time%22%3A+0%7D%7D&name=local%3Acommits&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:33 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:11 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "51"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2F4ca3h5qhtwgn55t3zznczixyt2st4tm44t23aceg; expires=Fri, 01-Mar-2024 00:12:33 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "api.token=cli-hahayouwish&corpus=create+alpha+for+phabricator+test+%E2%82%AC%0A%0ADifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD6054",
- "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "173"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "168"
]
- }
+ },
+ "body": "api.token=cli-hahayouwish&corpus=create+alpha+for+phabricator+test+%E2%82%AC%0A%0ADifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD1190",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create alpha for phabricator test \\u20ac\",\"revisionID\":6054},\"revisionIDFieldInfo\":{\"value\":6054,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"}},\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:34 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:11 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "316"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2F7pvtbpw2waiblbsbydew3vfpulqnccf4647ymipq; expires=Fri, 01-Mar-2024 00:12:34 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create alpha for phabricator test \\u20ac\",\"revisionID\":1190},\"revisionIDFieldInfo\":{\"value\":1190,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"create alpha for phabricator test \\u20ac\"}]},\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "api.token=cli-hahayouwish&transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-3wv2fwmzp27uamb66xxg&transactions%5B1%5D%5Btype%5D=title&transactions%5B1%5D%5Bvalue%5D=create+alpha+for+phabricator+test+%E2%82%AC&objectIdentifier=6054",
- "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "274"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "274"
]
- }
+ },
+ "body": "api.token=cli-hahayouwish&objectIdentifier=1190&transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-gra4b3ivsgebktbeoxxx&transactions%5B1%5D%5Btype%5D=title&transactions%5B1%5D%5Bvalue%5D=create+alpha+for+phabricator+test+%E2%82%AC",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":{\"object\":{\"id\":\"6054\",\"phid\":\"PHID-DREV-6pczsbtdpqjc2nskmxwy\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-mc2gfyoyhkfz7dy\"}]},\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:34 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:12 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "168"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2Fhmyuw3lg6h4joaswqnfcmnzdkp6p2qxotsvahb7l; expires=Fri, 01-Mar-2024 00:12:34 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":1190,\"phid\":\"PHID-DREV-kikesmfxhzpfaxbzgj3l\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-tk6ciodgzlwo2v6\"}]},\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3&api.token=cli-hahayouwish&diff=diff+--git+a%2Fbeta+b%2Fbeta%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Fbeta%0A%40%40+-0%2C0+%2B1%2C1+%40%40%0A%2Bbeta%0A",
- "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "231"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "231"
]
- }
+ },
+ "body": "api.token=cli-hahayouwish&diff=diff+--git+a%2Fbeta+b%2Fbeta%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Fbeta%0A%40%40+-0%2C0+%2B1%2C1+%40%40%0A%2Bbeta%0A&repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3",
+ "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":{\"id\":14305,\"phid\":\"PHID-DIFF-pofynzhmmqm2czm33teg\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/14305\\/\"},\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:35 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:13 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "172"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2F2xpzt6bryn7n3gug3ll7iu2gfqyy4zss5d7nolew; expires=Fri, 01-Mar-2024 00:12:35 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":{\"id\":1901,\"phid\":\"PHID-DIFF-uhbyhoejzbniwwzj2q5c\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/1901\\/\"},\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "diff_id=14305&data=%7B%22user%22%3A+%22test%22%2C+%22parent%22%3A+%22939d862f03181a366fea64a540baf0bb33f85d92%22%2C+%22node%22%3A+%22f55f947ed0f8ad80a04b7e87a0bf9febda2070b1%22%2C+%22date%22%3A+%220+0%22%7D&api.token=cli-hahayouwish&name=hg%3Ameta",
- "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "296"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "264"
]
- }
+ },
+ "body": "diff_id=1901&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%224b2486dfc8c7b238e70f8b022f9e09a0ea220415%22%2C+%22parent%22%3A+%22d940d39fb603f29ea5df4b7c15f315fe6ff4e346%22%2C+%22user%22%3A+%22test%22%7D&name=hg%3Ameta&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:36 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:14 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "51"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2Fygzbpe74xh6shrejkd3tj32t4gaqnvumy63iudrd; expires=Fri, 01-Mar-2024 00:12:36 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "diff_id=14305&data=%7B%22f55f947ed0f8ad80a04b7e87a0bf9febda2070b1%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&api.token=cli-hahayouwish&name=local%3Acommits",
- "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "257"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "227"
]
- }
+ },
+ "body": "diff_id=1901&data=%7B%224b2486dfc8c7b238e70f8b022f9e09a0ea220415%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%224b2486dfc8c7b238e70f8b022f9e09a0ea220415%22%2C+%22parents%22%3A+%5B%22d940d39fb603f29ea5df4b7c15f315fe6ff4e346%22%5D%2C+%22time%22%3A+0%7D%7D&name=local%3Acommits&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:37 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:15 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "51"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2Fgw67yfcsx7vvxkymeac52ca5is4jkxjwqqkhayco; expires=Fri, 01-Mar-2024 00:12:37 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "api.token=cli-hahayouwish&corpus=create+beta+for+phabricator+test",
- "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "82"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "82"
]
- }
+ },
+ "body": "api.token=cli-hahayouwish&corpus=create+beta+for+phabricator+test",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create beta for phabricator test\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"}},\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:37 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:15 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "282"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2Fyt5ejs6pgvjdxzms7geaxup63jpqkisngu3cprk6; expires=Fri, 01-Mar-2024 00:12:37 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create beta for phabricator test\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"create beta for phabricator test\"}]},\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-pofynzhmmqm2czm33teg&transactions%5B1%5D%5Btype%5D=summary&transactions%5B1%5D%5Bvalue%5D=Depends+on+D6054&transactions%5B2%5D%5Btype%5D=summary&transactions%5B2%5D%5Bvalue%5D=+&transactions%5B3%5D%5Btype%5D=title&transactions%5B3%5D%5Bvalue%5D=create+beta+for+phabricator+test&api.token=cli-hahayouwish",
- "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "398"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "398"
]
- }
+ },
+ "body": "api.token=cli-hahayouwish&transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-uhbyhoejzbniwwzj2q5c&transactions%5B1%5D%5Btype%5D=parents.set&transactions%5B1%5D%5Bvalue%5D%5B0%5D=PHID-DREV-kikesmfxhzpfaxbzgj3l&transactions%5B2%5D%5Btype%5D=title&transactions%5B2%5D%5Bvalue%5D=create+beta+for+phabricator+test",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":{\"object\":{\"id\":6055,\"phid\":\"PHID-DREV-k2hin2iytzuvu3j5icm3\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-3xjvwemev7dqsj3\"},{\"phid\":\"PHID-XACT-DREV-giypqlavgemr56i\"},{\"phid\":\"PHID-XACT-DREV-tcfqd4aj6rxtxzz\"},{\"phid\":\"PHID-XACT-DREV-2timgnudaxeln7a\"},{\"phid\":\"PHID-XACT-DREV-vb6564lrsxpsw4l\"},{\"phid\":\"PHID-XACT-DREV-maym4xi2tdhysvo\"},{\"phid\":\"PHID-XACT-DREV-bna5heyckxkk5ke\"},{\"phid\":\"PHID-XACT-DREV-b2eig3stbdic7k7\"}]},\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:38 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:17 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "420"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2Fgqyrj3op7rar26t6crqlt6rpdsxcefnrofqkw5rt; expires=Fri, 01-Mar-2024 00:12:38 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":1191,\"phid\":\"PHID-DREV-uuyrww2k3weorn2jwcaz\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-erc62kc5d5t53dw\"},{\"phid\":\"PHID-XACT-DREV-56jxoj2nev5we3e\"},{\"phid\":\"PHID-XACT-DREV-cajnfsuigdcmfpn\"},{\"phid\":\"PHID-XACT-DREV-expntfzlv44va6h\"},{\"phid\":\"PHID-XACT-DREV-hzrgd55fpfjcan7\"},{\"phid\":\"PHID-XACT-DREV-v4baqr7c5ydtltr\"},{\"phid\":\"PHID-XACT-DREV-ge6dwwrvrkluq2q\"}]},\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "api.token=cli-hahayouwish&ids%5B0%5D=6054&ids%5B1%5D=6055",
- "uri": "https://phab.mercurial-scm.org//api/differential.query",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "74"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "74"
]
- }
+ },
+ "body": "ids%5B0%5D=1190&ids%5B1%5D=1191&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":[{\"id\":\"6055\",\"phid\":\"PHID-DREV-k2hin2iytzuvu3j5icm3\",\"title\":\"create beta for phabricator test\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D6055\",\"dateCreated\":\"1551571958\",\"dateModified\":\"1551571958\",\"authorPHID\":\"PHID-USER-5iy6mkoveguhm2zthvww\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":[],\"branch\":null,\"summary\":\" \",\"testPlan\":\"\",\"lineCount\":\"1\",\"activeDiffPHID\":\"PHID-DIFF-pofynzhmmqm2czm33teg\",\"diffs\":[\"14305\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-6pczsbtdpqjc2nskmxwy\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null},{\"id\":\"6054\",\"phid\":\"PHID-DREV-6pczsbtdpqjc2nskmxwy\",\"title\":\"create alpha for phabricator test \\u20ac\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D6054\",\"dateCreated\":\"1551571947\",\"dateModified\":\"1551571958\",\"authorPHID\":\"PHID-USER-5iy6mkoveguhm2zthvww\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":[],\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-3wv2fwmzp27uamb66xxg\",\"diffs\":[\"14304\",\"14303\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:39 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:17 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "1514"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2F5wxg6sdf2mby5iljd5e5qpgoex6uefo5pgltav7k; expires=Fri, 01-Mar-2024 00:12:39 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"1191\",\"phid\":\"PHID-DREV-uuyrww2k3weorn2jwcaz\",\"title\":\"create beta for phabricator test\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D1191\",\"dateCreated\":\"1557063076\",\"dateModified\":\"1557063077\",\"authorPHID\":\"PHID-USER-qmzis76vb2yh3ogldu6r\",\"status\":\"0\",\"statusName\":\"Draft\",\"properties\":{\"draft.broadcast\":false,\"lines.added\":1,\"lines.removed\":0},\"branch\":null,\"summary\":\" \",\"testPlan\":\"\",\"lineCount\":\"1\",\"activeDiffPHID\":\"PHID-DIFF-uhbyhoejzbniwwzj2q5c\",\"diffs\":[\"1901\"],\"commits\":[],\"reviewers\":[],\"ccs\":[],\"hashes\":[],\"auxiliary\":{\"bugzilla.bug-id\":null,\"phabricator:projects\":[\"PHID-PROJ-f2a3wl5wxtqdtfgdjqzk\"],\"phabricator:depends-on\":[\"PHID-DREV-kikesmfxhzpfaxbzgj3l\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null},{\"id\":\"1190\",\"phid\":\"PHID-DREV-kikesmfxhzpfaxbzgj3l\",\"title\":\"create alpha for phabricator test \\u20ac\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D1190\",\"dateCreated\":\"1557063064\",\"dateModified\":\"1557063076\",\"authorPHID\":\"PHID-USER-qmzis76vb2yh3ogldu6r\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":2,\"lines.removed\":0},\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-gra4b3ivsgebktbeoxxx\",\"diffs\":[\"1900\",\"1899\"],\"commits\":[],\"reviewers\":[],\"ccs\":[],\"hashes\":[],\"auxiliary\":{\"bugzilla.bug-id\":null,\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "diff_id=14305&data=%7B%22user%22%3A+%22test%22%2C+%22parent%22%3A+%22939d862f03181a366fea64a540baf0bb33f85d92%22%2C+%22node%22%3A+%229c64e1fc33e1b9a70eb60643fe96a4d5badad9dc%22%2C+%22date%22%3A+%220+0%22%7D&api.token=cli-hahayouwish&name=hg%3Ameta",
- "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "296"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "264"
]
- }
+ },
+ "body": "diff_id=1901&data=%7B%22branch%22%3A+%22default%22%2C+%22date%22%3A+%220+0%22%2C+%22node%22%3A+%222837deb84f4ab1315c1197b8aef10c620465e352%22%2C+%22parent%22%3A+%22d940d39fb603f29ea5df4b7c15f315fe6ff4e346%22%2C+%22user%22%3A+%22test%22%7D&name=hg%3Ameta&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:40 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:18 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "51"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2F4c7iamnsn57y6qpccmbesf4ooflmkqvt4m6udawl; expires=Fri, 01-Mar-2024 00:12:40 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
}
}
},
{
"request": {
- "method": "POST",
- "body": "diff_id=14305&data=%7B%229c64e1fc33e1b9a70eb60643fe96a4d5badad9dc%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&api.token=cli-hahayouwish&name=local%3Acommits",
- "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
"headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.0+93-d811f17090a3+20190505)"
+ ],
+ "content-length": [
+ "257"
+ ],
"content-type": [
"application/x-www-form-urlencoded"
],
"accept": [
"application/mercurial-0.1"
],
- "user-agent": [
- "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)"
- ],
"host": [
"phab.mercurial-scm.org"
- ],
- "content-length": [
- "227"
]
- }
+ },
+ "body": "diff_id=1901&data=%7B%222837deb84f4ab1315c1197b8aef10c620465e352%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22branch%22%3A+%22default%22%2C+%22commit%22%3A+%222837deb84f4ab1315c1197b8aef10c620465e352%22%2C+%22parents%22%3A+%5B%22d940d39fb603f29ea5df4b7c15f315fe6ff4e346%22%5D%2C+%22time%22%3A+0%7D%7D&name=local%3Acommits&api.token=cli-hahayouwish",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
},
"response": {
"status": {
"code": 200,
"message": "OK"
},
- "body": {
- "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
- },
"headers": {
- "expires": [
- "Sat, 01 Jan 2000 00:00:00 GMT"
- ],
"x-xss-protection": [
"1; mode=block"
],
- "transfer-encoding": [
- "chunked"
- ],
- "date": [
- "Sun, 03 Mar 2019 00:12:40 GMT"
- ],
- "x-frame-options": [
- "Deny"
- ],
"cache-control": [
"no-store"
],
"content-type": [
"application/json"
],
+ "date": [
+ "Sun, 05 May 2019 13:31:19 GMT"
+ ],
+ "connection": [
+ "keep-alive"
+ ],
+ "strict-transport-security": [
+ "max-age=31536000; includeSubdomains; preload"
+ ],
+ "vary": [
+ "Accept-Encoding"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "content-length": [
+ "51"
+ ],
"x-content-type-options": [
"nosniff"
],
- "server": [
- "Apache/2.4.10 (Debian)"
- ],
- "set-cookie": [
- "phsid=A%2Ftdudqohojcq4hyc7gl4kthzkhuq3nmcxgnunpbjm; expires=Fri, 01-Mar-2024 00:12:40 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
- ],
- "strict-transport-security": [
- "max-age=0; includeSubdomains; preload"
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
}
}
}
- ],
- "version": 1
+ ]
}
--- a/tests/run-tests.py Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/run-tests.py Mon Jul 22 14:00:33 2019 -0400
@@ -282,7 +282,16 @@
return p
-PYTHON = _bytespath(sys.executable.replace('\\', '/'))
+if sys.executable:
+ sysexecutable = sys.executable
+elif os.environ.get('PYTHONEXECUTABLE'):
+ sysexecutable = os.environ['PYTHONEXECUTABLE']
+elif os.environ.get('PYTHON'):
+ sysexecutable = os.environ['PYTHON']
+else:
+ raise AssertionError('Could not find Python interpreter')
+
+PYTHON = _bytespath(sysexecutable.replace('\\', '/'))
IMPL_PATH = b'PYTHONPATH'
if 'java' in sys.platform:
IMPL_PATH = b'JYTHONPATH'
@@ -1094,7 +1103,7 @@
env["HGRCPATH"] = _strpath(os.path.join(self._threadtmp, b'.hgrc'))
env["DAEMON_PIDS"] = _strpath(os.path.join(self._threadtmp,
b'daemon.pids'))
- env["HGEDITOR"] = ('"' + sys.executable + '"'
+ env["HGEDITOR"] = ('"' + sysexecutable + '"'
+ ' -c "import sys; sys.exit(0)"')
env["HGUSER"] = "test"
env["HGENCODING"] = "ascii"
@@ -1465,6 +1474,12 @@
script.append(b'alias pwd="pwd -W"\n')
if hgcatapult and hgcatapult != os.devnull:
+ if PYTHON3:
+ hgcatapult = hgcatapult.encode('utf8')
+ cataname = self.name.encode('utf8')
+ else:
+ cataname = self.name
+
# Kludge: use a while loop to keep the pipe from getting
# closed by our echo commands. The still-running file gets
# reaped at the end of the script, which causes the while
@@ -1481,9 +1496,9 @@
b'HGCATAPULTSESSION=%(session)s ; export HGCATAPULTSESSION\n'
b'echo START %(session)s %(name)s >> %(catapult)s\n'
% {
- 'name': self.name,
- 'session': session,
- 'catapult': hgcatapult,
+ b'name': cataname,
+ b'session': session,
+ b'catapult': hgcatapult,
}
)
@@ -2349,7 +2364,7 @@
withhg = self._runner.options.with_hg
if withhg:
opts += ' --with-hg=%s ' % shellquote(_strpath(withhg))
- rtc = '%s %s %s %s' % (sys.executable, sys.argv[0], opts,
+ rtc = '%s %s %s %s' % (sysexecutable, sys.argv[0], opts,
test)
data = pread(bisectcmd + ['--command', rtc])
m = re.search(
@@ -2913,7 +2928,7 @@
result = runner.run(suite)
- if result.failures:
+ if result.failures or result.errors:
failed = True
result.onEnd()
@@ -3003,25 +3018,25 @@
# Administrator rights.
if getattr(os, 'symlink', None) and os.name != 'nt':
vlog("# Making python executable in test path a symlink to '%s'" %
- sys.executable)
+ sysexecutable)
mypython = os.path.join(self._tmpbindir, pyexename)
try:
- if os.readlink(mypython) == sys.executable:
+ if os.readlink(mypython) == sysexecutable:
return
os.unlink(mypython)
except OSError as err:
if err.errno != errno.ENOENT:
raise
- if self._findprogram(pyexename) != sys.executable:
+ if self._findprogram(pyexename) != sysexecutable:
try:
- os.symlink(sys.executable, mypython)
+ os.symlink(sysexecutable, mypython)
self._createdfiles.append(mypython)
except OSError as err:
# child processes may race, which is harmless
if err.errno != errno.EEXIST:
raise
else:
- exedir, exename = os.path.split(sys.executable)
+ exedir, exename = os.path.split(sysexecutable)
vlog("# Modifying search path to find %s as %s in '%s'" %
(exename, pyexename, exedir))
path = os.environ['PATH'].split(os.pathsep)
@@ -3048,7 +3063,7 @@
# Run installer in hg root
script = os.path.realpath(sys.argv[0])
- exe = sys.executable
+ exe = sysexecutable
if PYTHON3:
compiler = _bytespath(compiler)
script = _bytespath(script)
@@ -3183,7 +3198,7 @@
assert os.path.dirname(self._bindir) == self._installdir
assert self._hgroot, 'must be called after _installhg()'
cmd = (b'"%(make)s" clean install PREFIX="%(prefix)s"'
- % {b'make': 'make', # TODO: switch by option or environment?
+ % {b'make': b'make', # TODO: switch by option or environment?
b'prefix': self._installdir})
cwd = os.path.join(self._hgroot, b'contrib', b'chg')
vlog("# Running", cmd)
--- a/tests/test-absorb.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-absorb.t Mon Jul 22 14:00:33 2019 -0400
@@ -176,7 +176,7 @@
b2 2:c9b20c925790
* ba 2:c9b20c925790
-Non-mofified files are ignored:
+Non-modified files are ignored:
$ touch b
$ hg commit -A b -m b
@@ -225,10 +225,15 @@
2: 4d
2: insert aftert 4d
+ $ hg co -qC 1
+ $ sedi 's/Insert/insert/' a
+ $ hg absorb --apply-changes
+ abort: no mutable changeset to change
+ [255]
+
Make working copy clean:
- $ hg revert -q -C a b
- $ hg forget c
+ $ hg co -qC ba
$ rm c
$ hg status
@@ -261,7 +266,7 @@
$ echo 2 >> m1
$ echo 2 >> m2
$ hg absorb --apply-changes
- abort: no mutable changeset to change
+ abort: cannot absorb into a merge
[255]
$ hg revert -q -C m1 m2
@@ -394,6 +399,25 @@
1 changesets affected
99b4ae7 foo
+ $ hg absorb --dry-run --interactive --print-changes
+ diff -r 99b4ae712f84 foo.py
+ 1 hunks, 1 lines changed
+ examine changes to 'foo.py'?
+ (enter ? for help) [Ynesfdaq?] y
+
+ @@ -1,1 +1,1 @@
+ -
+ +bla
+ record this change to 'foo.py'?
+ (enter ? for help) [Ynesfdaq?] y
+
+ showing changes for foo.py
+ @@ -0,1 +0,1 @@
+ 99b4ae7 -
+ 99b4ae7 +bla
+
+ 1 changesets affected
+ 99b4ae7 foo
$ hg absorb --apply-changes
1 of 1 chunk(s) applied
$ hg diff -c .
--- a/tests/test-alias.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-alias.t Mon Jul 22 14:00:33 2019 -0400
@@ -645,6 +645,10 @@
alias for: hg root
+ options:
+
+ -T --template TEMPLATE display with template
+
(use 'hg rt -h' to show more help)
[255]
--- a/tests/test-amend.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-amend.t Mon Jul 22 14:00:33 2019 -0400
@@ -146,11 +146,13 @@
> EOS
diff --git a/F b/F
new file mode 100644
- examine changes to 'F'? [Ynesfdaq?] y
+ examine changes to 'F'?
+ (enter ? for help) [Ynesfdaq?] y
diff --git a/G b/G
new file mode 100644
- examine changes to 'G'? [Ynesfdaq?] n
+ examine changes to 'G'?
+ (enter ? for help) [Ynesfdaq?] n
saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/507be9bdac71-c8077452-amend.hg (obsstore-off !)
$ hg log -r . -T '{files}\n'
--- a/tests/test-annotate.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-annotate.t Mon Jul 22 14:00:33 2019 -0400
@@ -1,4 +1,7 @@
- $ HGMERGE=true; export HGMERGE
+ $ cat >> "$HGRCPATH" << EOF
+ > [ui]
+ > merge = :merge3
+ > EOF
init
@@ -210,8 +213,34 @@
created new head
$ hg merge
merging b
- 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
- (branch merge, don't forget to commit)
+ warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+ [1]
+ $ cat b
+ a
+ a
+ a
+ <<<<<<< working copy: 5fbdc1152d97 - test: b2.1
+ b4
+ c
+ b5
+ ||||||| base
+ =======
+ b4
+ b5
+ b6
+ >>>>>>> merge rev: 37ec9f5c3d1f - test: b2
+ $ cat <<EOF > b
+ > a
+ > a
+ > a
+ > b4
+ > c
+ > b5
+ > EOF
+ $ hg resolve --mark -q
+ $ rm b.orig
$ hg ci -mmergeb -d '3 0'
annotate after merge
@@ -244,15 +273,31 @@
> EOF
$ hg ci -mc -d '3 0'
created new head
+Work around the pure version not resolving the conflict like native code
+#if pure
+ $ hg merge
+ merging b
+ warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+ [1]
+ $ cat <<EOF > b
+ > a
+ > z
+ > a
+ > b4
+ > c
+ > b5
+ > EOF
+ $ hg resolve -m b
+ (no more unresolved files)
+ $ rm b.orig
+#else
$ hg merge
merging b
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
- $ cat <<EOF >> b
- > b4
- > c
- > b5
- > EOF
+#endif
$ echo d >> b
$ hg ci -mmerge2 -d '4 0'
@@ -695,8 +740,41 @@
27: baz:3+->3-
$ hg merge 25
merging baz and qux to qux
- 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
- (branch merge, don't forget to commit)
+ warning: conflicts while merging qux! (edit, then use 'hg resolve --mark')
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+ [1]
+ $ cat qux
+ 0
+ 0
+ 1 baz:1
+ 2 baz:2
+ <<<<<<< working copy: 863de62655ef - test: baz:3+->3-
+ 3- baz:3
+ 4 baz:4
+ ||||||| base
+ 3+ baz:3
+ 4 baz:4
+ =======
+ 3+ baz:3
+ 4+ baz:4
+ >>>>>>> merge rev: cb8df70ae185 - test: qux:4->4+
+ 5
+ 6
+ 7
+ $ cat > qux <<EOF
+ > 0
+ > 0
+ > 1 baz:1
+ > 2 baz:2
+ > 3- baz:3
+ > 4 baz:4
+ > 5
+ > 6
+ > 7
+ > EOF
+ $ hg resolve --mark -q
+ $ rm qux.orig
$ hg ci -m merge
$ hg log -T '{rev}: {desc}\n' -r 'followlines(qux, 5:7)'
16: baz:0
@@ -709,8 +787,40 @@
$ hg up 25 --quiet
$ hg merge 27
merging qux and baz to qux
- 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
- (branch merge, don't forget to commit)
+ warning: conflicts while merging qux! (edit, then use 'hg resolve --mark')
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+ [1]
+ $ cat qux
+ 0
+ 0
+ 1 baz:1
+ 2 baz:2
+ <<<<<<< working copy: cb8df70ae185 - test: qux:4->4+
+ 3+ baz:3
+ 4+ baz:4
+ ||||||| base
+ 3+ baz:3
+ 4 baz:4
+ =======
+ 3- baz:3
+ 4 baz:4
+ >>>>>>> merge rev: 863de62655ef - test: baz:3+->3-
+ 5
+ 6
+ 7
+ $ cat > qux <<EOF
+ > 0
+ > 0
+ > 1 baz:1
+ > 2 baz:2
+ > 3+ baz:3
+ > 4+ baz:4
+ > 5
+ > 6
+ > EOF
+ $ hg resolve --mark -q
+ $ rm qux.orig
$ hg ci -m 'merge from other side'
created new head
$ hg log -T '{rev}: {desc}\n' -r 'followlines(qux, 5:7)'
@@ -1061,6 +1171,19 @@
$ echo 3 >> a
$ hg commit -m 3 -q
$ hg merge 2 -q
+ warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+ [1]
+ $ cat a
+ <<<<<<< working copy: 0a068f0261cf - test: 3
+ 1
+ 2
+ 3
+ ||||||| base
+ 1
+ 2
+ =======
+ a
+ >>>>>>> merge rev: 9409851bc20a - test: a
$ cat > a << EOF
> b
> 1
@@ -1069,6 +1192,7 @@
> a
> EOF
$ hg resolve --mark -q
+ $ rm a.orig
$ hg commit -m m
$ hg annotate a
4: b
--- a/tests/test-backout.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-backout.t Mon Jul 22 14:00:33 2019 -0400
@@ -583,12 +583,12 @@
(branch merge, don't forget to commit)
$ hg ci -d '4 0' -m 'merge backout of branch1'
$ hg id
- 22149cdde76d (branch2) tip
+ d97a8500a969 (branch2) tip
$ hg st -A
C default
C file2
$ hg summary
- parent: 4:22149cdde76d tip
+ parent: 4:d97a8500a969 tip
merge backout of branch1
branch: branch2
commit: (clean)
--- a/tests/test-basic.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-basic.t Mon Jul 22 14:00:33 2019 -0400
@@ -98,6 +98,14 @@
$TESTTMP/t
$ hg log -l1 -T '{reporoot}\n'
$TESTTMP/t
+ $ hg root -Tjson | sed 's|\\\\|\\|g'
+ [
+ {
+ "hgpath": "$TESTTMP/t/.hg",
+ "reporoot": "$TESTTMP/t",
+ "storepath": "$TESTTMP/t/.hg/store"
+ }
+ ]
At the end...
--- a/tests/test-bisect.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-bisect.t Mon Jul 22 14:00:33 2019 -0400
@@ -600,6 +600,129 @@
summary: msg 30
+Rewritten commits should not crash
+
+ $ hg co 29
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg revert --all -r 30
+ reverting a
+ $ hg commit -m 'msg 30 -- fixed'
+ created new head
+ $ hg debugobsolete `hg id --debug -i -r 30` `hg id --debug -i -r .`
+ obsoleted 1 changesets
+ $ hg bisect
+ The first bad revision is:
+ changeset: 30:ed2d2f24b11c
+ user: test
+ date: Thu Jan 01 00:00:30 1970 +0000
+ obsolete: rewritten as 32:8a638ebd1122
+ summary: msg 30
+
+
+Log template does not crash
+
+ $ hg log -GTbisect -r 15::
+ @ changeset: 32:8a638ebd1122
+ | bisect: good (implicit)
+ | tag: tip
+ | parent: 29:b5bd63375ab9
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: msg 30 -- fixed
+ |
+ o changeset: 29:b5bd63375ab9
+ | bisect: good
+ | user: test
+ | date: Thu Jan 01 00:00:29 1970 +0000
+ | summary: msg 29
+ |
+ o changeset: 28:8e0c2264c8af
+ | bisect: good
+ | user: test
+ | date: Thu Jan 01 00:00:28 1970 +0000
+ | summary: msg 28
+ |
+ o changeset: 27:288867a866e9
+ | bisect: ignored
+ | user: test
+ | date: Thu Jan 01 00:00:27 1970 +0000
+ | summary: msg 27
+ |
+ o changeset: 26:3efc6fd51aeb
+ | bisect: good
+ | user: test
+ | date: Thu Jan 01 00:00:26 1970 +0000
+ | summary: msg 26
+ |
+ o changeset: 25:02a84173a97a
+ | bisect: ignored
+ | user: test
+ | date: Thu Jan 01 00:00:25 1970 +0000
+ | summary: msg 25
+ |
+ o changeset: 24:10e0acd3809e
+ | bisect: ignored
+ | user: test
+ | date: Thu Jan 01 00:00:24 1970 +0000
+ | summary: msg 24
+ |
+ o changeset: 23:5ec79163bff4
+ | bisect: ignored
+ | user: test
+ | date: Thu Jan 01 00:00:23 1970 +0000
+ | summary: msg 23
+ |
+ o changeset: 22:06c7993750ce
+ | bisect: good
+ | user: test
+ | date: Thu Jan 01 00:00:22 1970 +0000
+ | summary: msg 22
+ |
+ o changeset: 21:e5db6aa3fe2a
+ | bisect: ignored
+ | user: test
+ | date: Thu Jan 01 00:00:21 1970 +0000
+ | summary: msg 21
+ |
+ o changeset: 20:7128fb4fdbc9
+ | bisect: ignored
+ | user: test
+ | date: Thu Jan 01 00:00:20 1970 +0000
+ | summary: msg 20
+ |
+ o changeset: 19:52798545b482
+ | bisect: ignored
+ | user: test
+ | date: Thu Jan 01 00:00:19 1970 +0000
+ | summary: msg 19
+ |
+ o changeset: 18:86977a90077e
+ | bisect: ignored
+ | user: test
+ | date: Thu Jan 01 00:00:18 1970 +0000
+ | summary: msg 18
+ |
+ o changeset: 17:03515f4a9080
+ | bisect: ignored
+ | user: test
+ | date: Thu Jan 01 00:00:17 1970 +0000
+ | summary: msg 17
+ |
+ o changeset: 16:a2e6ea4973e9
+ | bisect: ignored
+ | user: test
+ | date: Thu Jan 01 00:00:16 1970 +0000
+ | summary: msg 16
+ |
+ o changeset: 15:e7fa0811edb0
+ | bisect: good
+ ~ user: test
+ date: Thu Jan 01 00:00:15 1970 +0000
+ summary: msg 15
+
+ $ hg debugobsolete --delete `hg debugobsolete --index -T'{index}\n' | tail -1`
+ deleted 1 obsolescence markers
+
Changeset in the bad:good range is obsolete
---------------------------------------------
--- a/tests/test-blackbox.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-blackbox.t Mon Jul 22 14:00:33 2019 -0400
@@ -7,7 +7,7 @@
> @command(b'crash', [], b'hg crash')
> def crash(ui, *args, **kwargs):
> raise Exception("oops")
- > @command(b'abort', [], b'hg abort')
+ > @command(b'abortcmd', [], b'hg abortcmd')
> def abort(ui, *args, **kwargs):
> raise error.Abort(b"oops")
> EOF
@@ -22,9 +22,6 @@
> [alias]
> confuse = log --limit 3
> so-confusing = confuse --style compact
- > [blackbox]
- > track = backupbundle, branchcache, command, commandalias, commandexception,
- > commandfinish, debug, exthook, incoming, pythonhook, tagscache
> EOF
$ hg init blackboxtest
@@ -52,10 +49,10 @@
abort exit code
$ rm ./.hg/blackbox.log
- $ hg abort 2> /dev/null
+ $ hg abortcmd 2> /dev/null
[255]
$ hg blackbox -l 2
- 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> abort exited 255 after * seconds (glob)
+ 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> abortcmd exited 255 after * seconds (glob)
1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> blackbox -l 2
unhandled exception
@@ -125,8 +122,8 @@
(run 'hg update' to get a working copy)
$ hg blackbox -l 6
1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> pull
- 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> updated served branch cache in * seconds (glob)
- 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote served branch cache with 1 labels and 2 nodes
+ 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> updated branch cache (served) in * seconds (glob)
+ 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote branch cache (served) with 1 labels and 2 nodes
1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> 1 incoming changes - new heads: d02f48003e62
1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> pull exited 0 after * seconds (glob)
1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> blackbox -l 6
@@ -189,8 +186,8 @@
$ hg blackbox -l 6
1970/01/01 00:00:00 bob @73f6ee326b27d820b0472f1a825e3a50f3dc489b (5000)> strip tip
1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/73f6ee326b27-7612e004-backup.hg
- 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> updated base branch cache in * seconds (glob)
- 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote base branch cache with 1 labels and 2 nodes
+ 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> updated branch cache (base) in * seconds (glob)
+ 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote branch cache (base) with 1 labels and 2 nodes
1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> strip tip exited 0 after * seconds (glob)
1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> blackbox -l 6
@@ -303,8 +300,8 @@
result: 0
$ hg blackbox
1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> updating the branch cache
- 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> updated served branch cache in * seconds (glob)
- 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> wrote served branch cache with 1 labels and 1 nodes
+ 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> updated branch cache (served) in * seconds (glob)
+ 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> wrote branch cache (served) with 1 labels and 1 nodes
1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug commit -m commit2 -d 2000-01-02 foo exited 0 after *.?? seconds (glob)
1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r 0
1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> writing .hg/cache/tags2-visible with 0 tags
--- a/tests/test-bookflow.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-bookflow.t Mon Jul 22 14:00:33 2019 -0400
@@ -215,11 +215,11 @@
$ hg diff --stat
test | 1 +
1 files changed, 1 insertions(+), 0 deletions(-)
- $ hg --config extensions.shelve= shelve
+ $ hg shelve
shelved as Z
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg pull -uq
- $ hg --trace --config extensions.shelve= unshelve
+ $ hg unshelve
unshelving change 'Z'
rebasing shelved changes
$ hg diff --stat
--- a/tests/test-bookmarks-corner-case.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-bookmarks-corner-case.t Mon Jul 22 14:00:33 2019 -0400
@@ -116,16 +116,16 @@
We build a server side extension for this purpose
$ cat > bookrace.py << EOF
+ > import atexit
> import os
> import time
- > import atexit
> from mercurial import error, extensions, bookmarks
>
> def wait(repo):
> if not os.path.exists('push-A-started'):
> assert repo._currentlock(repo._lockref) is None
> assert repo._currentlock(repo._wlockref) is None
- > print('setting raced push up')
+ > repo.ui.status(b'setting raced push up\n')
> with open('push-A-started', 'w'):
> pass
> clock = 300
--- a/tests/test-branch-change.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-branch-change.t Mon Jul 22 14:00:33 2019 -0400
@@ -181,7 +181,7 @@
starting 4 threads for background file closing (?)
changed branch on 2 changesets
updating the branch cache
- invalid branchheads cache (served): tip differs
+ invalid branch cache (served): tip differs
$ hg glog -r '(.^)::'
@ 9:de1404b45a69 Added e
--- a/tests/test-branches.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-branches.t Mon Jul 22 14:00:33 2019 -0400
@@ -280,7 +280,7 @@
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg commit -d '9 0' --close-branch -m 'close this part branch too'
$ hg commit -d '9 0' --close-branch -m 're-closing this branch'
- abort: can only close branch heads
+ abort: current revision is already a branch closing head
[255]
$ hg log -r tip --debug
@@ -940,3 +940,51 @@
0010: 56 46 78 69 00 00 00 01 |VFxi....|
$ cd ..
+
+Test to make sure that `--close-branch` only works on a branch head:
+--------------------------------------------------------------------
+ $ hg init closebranch
+ $ cd closebranch
+ $ for ch in a b c; do
+ > echo $ch > $ch
+ > hg add $ch
+ > hg ci -m "added "$ch
+ > done;
+
+ $ hg up -r "desc('added b')"
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+
+Trying to close a branch from a changeset which is not a branch head;
+it should abort:
+ $ hg ci -m "closing branch" --close-branch
+ abort: can only close branch heads
+ (use --force-close-branch to close branch from a non-head changeset)
+ [255]
+
+ $ hg up 0
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg log -GT "{rev}: {node|short} {desc|firstline}\n\t{branch}\n\n"
+ o 2: 155349b645be added c
+ | default
+ |
+ o 1: 5f6d8a4bf34a added b
+ | default
+ |
+ @ 0: 9092f1db7931 added a
+ default
+
+Test --force-close-branch to close a branch from a non-head changeset:
+---------------------------------------------------------------------
+
+ $ hg show stack --config extensions.show=
+ o 1553 added c
+ o 5f6d added b
+ @ 9092 added a
+
+ $ hg ci -m "branch closed" --close-branch
+ abort: can only close branch heads
+ (use --force-close-branch to close branch from a non-head changeset)
+ [255]
+
+ $ hg ci -m "branch closed" --force-close-branch
+ created new head
--- a/tests/test-check-code.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-check-code.t Mon Jul 22 14:00:33 2019 -0400
@@ -15,6 +15,8 @@
Skipping contrib/automation/hgautomation/__init__.py it has no-che?k-code (glob)
Skipping contrib/automation/hgautomation/aws.py it has no-che?k-code (glob)
Skipping contrib/automation/hgautomation/cli.py it has no-che?k-code (glob)
+ Skipping contrib/automation/hgautomation/linux.py it has no-che?k-code (glob)
+ Skipping contrib/automation/hgautomation/ssh.py it has no-che?k-code (glob)
Skipping contrib/automation/hgautomation/windows.py it has no-che?k-code (glob)
Skipping contrib/automation/hgautomation/winrm.py it has no-che?k-code (glob)
Skipping contrib/packaging/hgpackaging/downloads.py it has no-che?k-code (glob)
--- a/tests/test-clone.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-clone.t Mon Jul 22 14:00:33 2019 -0400
@@ -719,6 +719,14 @@
$ hg -R src debugrevlog -c | egrep 'format|flags'
format : 0
flags : (none)
+ $ hg root -R src -T json | sed 's|\\\\|\\|g'
+ [
+ {
+ "hgpath": "$TESTTMP/src/.hg",
+ "reporoot": "$TESTTMP/src",
+ "storepath": "$TESTTMP/src/.hg"
+ }
+ ]
$ hg clone -U -q src dst
$ hg -R dst log -q
0:e1bab28bca43
--- a/tests/test-clonebundles.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-clonebundles.t Mon Jul 22 14:00:33 2019 -0400
@@ -53,7 +53,7 @@
$ echo 'http://does.not.exist/bundle.hg' > server/.hg/clonebundles.manifest
$ hg clone http://localhost:$HGPORT 404-url
applying clone bundle from http://does.not.exist/bundle.hg
- error fetching bundle: (.* not known|(\[Errno -?\d+])? No address associated with hostname) (re) (no-windows !)
+ error fetching bundle: (.* not known|(\[Errno -?\d+])? [Nn]o address associated with (host)?name) (re) (no-windows !)
error fetching bundle: [Errno 1100*] getaddrinfo failed (glob) (windows !)
abort: error applying bundle
(if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
--- a/tests/test-close-head.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-close-head.t Mon Jul 22 14:00:33 2019 -0400
@@ -33,7 +33,11 @@
$ hg --config extensions.closehead= close-head -m 'Not a head' -r 0 1
abort: revision is not an open head: 0
[255]
+ $ hg id
+ 000000000000
$ hg --config extensions.closehead= close-head -m 'Close old heads' -r 1 2
+ $ hg id
+ 000000000000
$ hg bookmark
@ 1:66f7d451a68b
$ hg heads
--- a/tests/test-commandserver.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-commandserver.t Mon Jul 22 14:00:33 2019 -0400
@@ -917,13 +917,13 @@
> raise error.Abort(b'fail after finalization')
> def reposetup(ui, repo):
> class failrepo(repo.__class__):
- > def commitctx(self, ctx, error=False):
+ > def commitctx(self, ctx, error=False, origctx=None):
> if self.ui.configbool(b'failafterfinalize', b'fail'):
> # 'sorted()' by ASCII code on category names causes
> # invoking 'fail' after finalization of changelog
> # using "'cl-%i' % id(self)" as category name
> self.currenttransaction().addfinalize(b'zzzzzzzz', fail)
- > return super(failrepo, self).commitctx(ctx, error)
+ > return super(failrepo, self).commitctx(ctx, error, origctx)
> repo.__class__ = failrepo
> EOF
--- a/tests/test-commit-amend.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-commit-amend.t Mon Jul 22 14:00:33 2019 -0400
@@ -649,7 +649,7 @@
(no more unresolved files)
$ hg ci -m 'merge bar'
$ hg log --config diff.git=1 -pr .
- changeset: 20:163cfd7219f7
+ changeset: 20:5aba7f3726e6
tag: tip
parent: 19:30d96aeaf27b
parent: 18:1aa437659d19
@@ -682,7 +682,7 @@
$ HGEDITOR="sh .hg/checkeditform.sh" hg ci --amend -m 'merge bar (amend message)' --edit
HGEDITFORM=commit.amend.merge
$ hg log --config diff.git=1 -pr .
- changeset: 21:bca52d4ed186
+ changeset: 21:4b0631ef043e
tag: tip
parent: 19:30d96aeaf27b
parent: 18:1aa437659d19
@@ -715,7 +715,7 @@
$ hg mv zz z
$ hg ci --amend -m 'merge bar (undo rename)'
$ hg log --config diff.git=1 -pr .
- changeset: 22:12594a98ca3f
+ changeset: 22:06423be42d60
tag: tip
parent: 19:30d96aeaf27b
parent: 18:1aa437659d19
@@ -751,9 +751,9 @@
$ echo aa >> aaa
$ hg ci -m 'merge bar again'
$ hg log --config diff.git=1 -pr .
- changeset: 24:dffde028b388
+ changeset: 24:a89974a20457
tag: tip
- parent: 22:12594a98ca3f
+ parent: 22:06423be42d60
parent: 23:4c94d5bc65f5
user: test
date: Thu Jan 01 00:00:00 1970 +0000
@@ -783,12 +783,26 @@
$ hg debugrename aaa
aaa renamed from aa:37d9b5d994eab34eda9c16b195ace52c7b129980
+
+Update to p1 with 'aaa' modified. 'aaa' was renamed from 'aa' in p2. 'aa' exists
+in p1 too, but it was recorded as copied from p2.
+ $ echo modified >> aaa
+ $ hg co -m '.^' -t :merge3
+ file 'aaa' was deleted in other [destination] but was modified in local [working copy].
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
+ 1 files updated, 0 files merged, 1 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges
+ [1]
+ $ hg co -C tip
+ 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+
$ hg mv aaa aa
$ hg ci --amend -m 'merge bar again (undo rename)'
$ hg log --config diff.git=1 -pr .
- changeset: 25:18e3ba160489
+ changeset: 25:282080768800
tag: tip
- parent: 22:12594a98ca3f
+ parent: 22:06423be42d60
parent: 23:4c94d5bc65f5
user: test
date: Thu Jan 01 00:00:00 1970 +0000
@@ -825,13 +839,13 @@
> c
> EOF
file 'aa' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? c
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? c
$ hg ci -m 'merge bar (with conflicts)'
$ hg log --config diff.git=1 -pr .
- changeset: 28:b4c3035e2544
+ changeset: 28:ed15db12298d
tag: tip
- parent: 27:4b216ca5ba97
+ parent: 27:eb5adec0b43b
parent: 26:67db8847a540
user: test
date: Thu Jan 01 00:00:00 1970 +0000
@@ -841,9 +855,9 @@
$ hg rm aa
$ hg ci --amend -m 'merge bar (with conflicts, amended)'
$ hg log --config diff.git=1 -pr .
- changeset: 29:1205ed810051
+ changeset: 29:0eeafd043f63
tag: tip
- parent: 27:4b216ca5ba97
+ parent: 27:eb5adec0b43b
parent: 26:67db8847a540
user: test
date: Thu Jan 01 00:00:00 1970 +0000
@@ -878,7 +892,7 @@
marked working directory as branch silliness
$ echo b >> b
$ hg ci --close-branch -m'open and close'
- abort: can only close branch heads
+ abort: branch "silliness" has no heads to close
[255]
Test that amend with --secret creates new secret changeset forcibly
@@ -916,6 +930,27 @@
$ hg parents --template "{desc}\n"
editor should be invoked
+Test that amend with --no-edit avoids the editor
+------------------------------------------------
+
+ $ hg commit --amend -m "before anything happens"
+ $ hg parents --template "{desc}\n"
+ before anything happens
+ $ HGEDITOR=cat hg commit --amend --no-edit -m "editor should be suppressed"
+ $ hg parents --template "{desc}\n"
+ editor should be suppressed
+
+(We need a file change here since we won't have a message change)
+ $ cp foo foo.orig
+ $ echo hi >> foo
+ $ HGEDITOR=cat hg commit --amend --no-edit
+ $ hg parents --template "{desc}\n"
+ editor should be suppressed
+ $ hg status -mar
+(Let's undo adding that "hi" so later tests don't need to be adjusted)
+ $ mv foo.orig foo
+ $ hg commit --amend --no-edit
+
Test that "diff()" in committemplate works correctly for amending
-----------------------------------------------------------------
@@ -939,7 +974,7 @@
HG: M:
HG: A: foo
HG: R:
- HG: diff -r 1205ed810051 foo
+ HG: diff -r 0eeafd043f63 foo
HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000
HG: +++ b/foo Thu Jan 01 00:00:00 1970 +0000
HG: @@ -0,0 +1,1 @@
@@ -953,12 +988,12 @@
HG: M:
HG: A: foo y
HG: R:
- HG: diff -r 1205ed810051 foo
+ HG: diff -r 0eeafd043f63 foo
HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000
HG: +++ b/foo Thu Jan 01 00:00:00 1970 +0000
HG: @@ -0,0 +1,1 @@
HG: +foo
- HG: diff -r 1205ed810051 y
+ HG: diff -r 0eeafd043f63 y
HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000
HG: +++ b/y Thu Jan 01 00:00:00 1970 +0000
HG: @@ -0,0 +1,1 @@
@@ -971,18 +1006,18 @@
HG: M:
HG: A: foo y
HG: R: a
- HG: diff -r 1205ed810051 a
+ HG: diff -r 0eeafd043f63 a
HG: --- a/a Thu Jan 01 00:00:00 1970 +0000
HG: +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
HG: @@ -1,2 +0,0 @@
HG: -a
HG: -a
- HG: diff -r 1205ed810051 foo
+ HG: diff -r 0eeafd043f63 foo
HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000
HG: +++ b/foo Thu Jan 01 00:00:00 1970 +0000
HG: @@ -0,0 +1,1 @@
HG: +foo
- HG: diff -r 1205ed810051 y
+ HG: diff -r 0eeafd043f63 y
HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000
HG: +++ b/y Thu Jan 01 00:00:00 1970 +0000
HG: @@ -0,0 +1,1 @@
@@ -995,23 +1030,23 @@
HG: M:
HG: A: foo y
HG: R: a x
- HG: diff -r 1205ed810051 a
+ HG: diff -r 0eeafd043f63 a
HG: --- a/a Thu Jan 01 00:00:00 1970 +0000
HG: +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
HG: @@ -1,2 +0,0 @@
HG: -a
HG: -a
- HG: diff -r 1205ed810051 foo
+ HG: diff -r 0eeafd043f63 foo
HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000
HG: +++ b/foo Thu Jan 01 00:00:00 1970 +0000
HG: @@ -0,0 +1,1 @@
HG: +foo
- HG: diff -r 1205ed810051 x
+ HG: diff -r 0eeafd043f63 x
HG: --- a/x Thu Jan 01 00:00:00 1970 +0000
HG: +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
HG: @@ -1,1 +0,0 @@
HG: -x
- HG: diff -r 1205ed810051 y
+ HG: diff -r 0eeafd043f63 y
HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000
HG: +++ b/y Thu Jan 01 00:00:00 1970 +0000
HG: @@ -0,0 +1,1 @@
@@ -1026,23 +1061,23 @@
HG: M:
HG: A: foo y
HG: R: a x
- HG: diff -r 1205ed810051 a
+ HG: diff -r 0eeafd043f63 a
HG: --- a/a Thu Jan 01 00:00:00 1970 +0000
HG: +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
HG: @@ -1,2 +0,0 @@
HG: -a
HG: -a
- HG: diff -r 1205ed810051 foo
+ HG: diff -r 0eeafd043f63 foo
HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000
HG: +++ b/foo Thu Jan 01 00:00:00 1970 +0000
HG: @@ -0,0 +1,1 @@
HG: +foo
- HG: diff -r 1205ed810051 x
+ HG: diff -r 0eeafd043f63 x
HG: --- a/x Thu Jan 01 00:00:00 1970 +0000
HG: +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
HG: @@ -1,1 +0,0 @@
HG: -x
- HG: diff -r 1205ed810051 y
+ HG: diff -r 0eeafd043f63 y
HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000
HG: +++ b/y Thu Jan 01 00:00:00 1970 +0000
HG: @@ -0,0 +1,1 @@
--- a/tests/test-commit-interactive-curses.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-commit-interactive-curses.t Mon Jul 22 14:00:33 2019 -0400
@@ -34,8 +34,8 @@
$ echo "a" > a
$ hg add a
$ cat <<EOF >testModeCommands
- > TOGGLE
- > X
+ > x
+ > c
> EOF
$ hg commit -i -m "a" -d "0 0"
no changes to record
@@ -50,7 +50,7 @@
Committing some changes
$ cat <<EOF >testModeCommands
- > X
+ > c
> EOF
$ hg commit -i -m "a" -d "0 0"
$ hg tip
@@ -71,9 +71,9 @@
>>> open('b', 'wb').write(b"1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n") and None
$ hg add b
$ cat <<EOF >testModeCommands
- > TOGGLE
+ > x
> KEY_DOWN
- > X
+ > c
> EOF
$ hg commit -i -m "one file" -d "0 0"
$ hg tip
@@ -112,11 +112,11 @@
> KEY_DOWN
> KEY_DOWN
> KEY_DOWN
- > TOGGLE
+ > x
> a
> a
> e
- > X
+ > c
> EOF
$ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit -i -m "one hunk" -d "0 0"
editor ran
@@ -182,9 +182,9 @@
$ echo "hello" > x
$ hg add x
$ cat <<EOF >testModeCommands
- > TOGGLE
- > TOGGLE
- > X
+ > x
+ > x
+ > c
> EOF
$ hg st
A x
@@ -203,7 +203,7 @@
+hello
$ cat <<EOF >testModeCommands
> a
- > X
+ > c
> EOF
$ hg commit -i -m "newly added file" -d "0 0"
saved backup bundle to $TESTTMP/a/.hg/strip-backup/2b0e9be4d336-3cf0bc8c-amend.hg
@@ -217,7 +217,7 @@
Make file empty
$ printf "" > x
$ cat <<EOF >testModeCommands
- > X
+ > c
> EOF
$ hg ci -i -m emptify -d "0 0"
$ hg update -C '.^' -q
@@ -240,8 +240,8 @@
> KEY_DOWN
> KEY_DOWN
> e
- > TOGGLE
- > X
+ > x
+ > c
> EOF
$ printf "printf 'editor ran\n'; exit 0" > editor.sh
$ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit -i -m "edit hunk" -d "0 0" -q
@@ -272,11 +272,11 @@
> EOF
$ cat > testModeCommands <<EOF
> KEY_DOWN
- > TOGGLE
+ > x
> KEY_DOWN
> f
> KEY_DOWN
- > TOGGLE
+ > x
> R
> EOF
@@ -327,6 +327,50 @@
hello world
lower
+Test range select: unselect 3, 5, and 6, reselect 5, then go back up to 2 and
+press 'X', unselecting (because 2 is currently selected) 5 (because it's the
+start of the range) and 4, leaving 3 unselected.
+
+ $ hg init $TESTTMP/range_select
+ $ cd $TESTTMP/range_select
+ >>> open('range_select', 'wb').write(b"1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n") and None
+ $ hg add range_select
+ $ cat <<EOF >testModeCommands
+ > KEY_RIGHT
+ > KEY_RIGHT
+ > KEY_DOWN
+ > KEY_DOWN
+ > KEY_ENTER
+ > KEY_DOWN
+ > KEY_ENTER
+ > x
+ > KEY_UP
+ > x
+ > KEY_UP
+ > KEY_UP
+ > KEY_UP
+ > X
+ > c
+ > EOF
+ $ hg commit -i -m "range_select" -d "0 0"
+ $ hg cat -r tip range_select
+ 1
+ 7
+ 8
+ 9
+ 10
+ $ cat range_select
+ 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+ 10
+
Check ui.interface logic for the chunkselector
The default interface is text
--- a/tests/test-commit-interactive.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-commit-interactive.t Mon Jul 22 14:00:33 2019 -0400
@@ -100,7 +100,8 @@
diff --git a/empty-rw b/empty-rename
rename from empty-rw
rename to empty-rename
- examine changes to 'empty-rw' and 'empty-rename'? [Ynesfdaq?] y
+ examine changes to 'empty-rw' and 'empty-rename'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg tip -p
@@ -121,7 +122,8 @@
diff --git a/empty-rename b/empty-copy
copy from empty-rename
copy to empty-copy
- examine changes to 'empty-rename' and 'empty-copy'? [Ynesfdaq?] y
+ examine changes to 'empty-rename' and 'empty-copy'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg tip -p
@@ -141,7 +143,8 @@
> EOF
diff --git a/empty-copy b/empty-copy
deleted file mode 100644
- examine changes to 'empty-copy'? [Ynesfdaq?] y
+ examine changes to 'empty-copy'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg tip -p
@@ -164,7 +167,8 @@
diff --git a/tip.bundle b/tip.bundle
new file mode 100644
this is a binary file
- examine changes to 'tip.bundle'? [Ynesfdaq?] y
+ examine changes to 'tip.bundle'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg tip -p
@@ -187,7 +191,8 @@
> EOF
diff --git a/tip.bundle b/tip.bundle
this modifies a binary file (all or nothing)
- examine changes to 'tip.bundle'? [Ynesfdaq?] y
+ examine changes to 'tip.bundle'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg tip -p
@@ -213,7 +218,8 @@
rename from tip.bundle
rename to top.bundle
this modifies a binary file (all or nothing)
- examine changes to 'tip.bundle' and 'top.bundle'? [Ynesfdaq?] y
+ examine changes to 'tip.bundle' and 'top.bundle'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg tip -p
@@ -253,7 +259,8 @@
+8
+9
+10
- record this change to 'plain'? [Ynesfdaq?] y
+ record this change to 'plain'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg tip -p
changeset: 7:11fb457c1be4
@@ -302,7 +309,8 @@
9
10
+11
- record this change to 'plain'? [Ynesfdaq?] y
+ record this change to 'plain'?
+ (enter ? for help) [Ynesfdaq?] y
Modify end of plain file, no EOL
@@ -320,7 +328,8 @@
11
+7264f99c5f5ff3261504828afa4fb4d406c3af54
\ No newline at end of file
- record this change to 'plain'? [Ynesfdaq?] y
+ record this change to 'plain'?
+ (enter ? for help) [Ynesfdaq?] y
Record showfunc should preserve function across sections
@@ -401,7 +410,8 @@
> EOF
diff --git a/f1.py b/f1.py
3 hunks, 6 lines changed
- examine changes to 'f1.py'? [Ynesfdaq?] y
+ examine changes to 'f1.py'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -2,8 +2,8 @@ def annotate(ui, repo, *pats, **opts):
"""show changeset information by line for each file
@@ -413,7 +423,8 @@
This command is useful for discovering when a change was made and
by whom.
- record change 1/3 to 'f1.py'? [Ynesfdaq?] y
+ record change 1/3 to 'f1.py'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -6,11 +6,7 @@ def annotate(ui, repo, *pats, **opts):
@@ -427,7 +438,8 @@
Without the -a/--text option, annotate will avoid processing files
it detects as binary. With -a, annotate will annotate the file
anyway, although the results will probably be neither useful
- record change 2/3 to 'f1.py'? [Ynesfdaq?] y
+ record change 2/3 to 'f1.py'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -26,7 +22,7 @@ def archive(ui, repo, dest, **opts):
directory; use -r/--rev to specify a different revision.
@@ -438,7 +450,8 @@
.. container:: verbose
- record change 3/3 to 'f1.py'? [Ynesfdaq?] y
+ record change 3/3 to 'f1.py'?
+ (enter ? for help) [Ynesfdaq?] y
Modify end of plain file, add EOL
@@ -461,13 +474,15 @@
-7264f99c5f5ff3261504828afa4fb4d406c3af54
\ No newline at end of file
+7264f99c5f5ff3261504828afa4fb4d406c3af54
- record change 1/2 to 'plain'? [Ynesfdaq?] y
+ record change 1/2 to 'plain'?
+ (enter ? for help) [Ynesfdaq?] y
diff --git a/plain2 b/plain2
new file mode 100644
@@ -0,0 +1,1 @@
+1
- record change 2/2 to 'plain2'? [Ynesfdaq?] y
+ record change 2/2 to 'plain2'?
+ (enter ? for help) [Ynesfdaq?] y
Modify beginning, trim end, record both, add another file to test
changes numbering
@@ -493,7 +508,8 @@
2
3
4
- record change 1/3 to 'plain'? [Ynesfdaq?] y
+ record change 1/3 to 'plain'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -8,5 +8,3 @@ 7
8
@@ -501,14 +517,16 @@
10
-11
-7264f99c5f5ff3261504828afa4fb4d406c3af54
- record change 2/3 to 'plain'? [Ynesfdaq?] y
+ record change 2/3 to 'plain'?
+ (enter ? for help) [Ynesfdaq?] y
diff --git a/plain2 b/plain2
1 hunks, 1 lines changed
@@ -1,1 +1,2 @@
1
+2
- record change 3/3 to 'plain2'? [Ynesfdaq?] y
+ record change 3/3 to 'plain2'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg tip -p
@@ -566,7 +584,8 @@
7
8
9
- record change 1/2 to 'plain'? [Ynesfdaq?] n
+ record change 1/2 to 'plain'?
+ (enter ? for help) [Ynesfdaq?] n
@@ -4,7 +1,7 @@
4
@@ -577,7 +596,8 @@
9
-10
+10.new
- record change 2/2 to 'plain'? [Ynesfdaq?] y
+ record change 2/2 to 'plain'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg tip -p
@@ -613,7 +633,8 @@
4
5
6
- record this change to 'plain'? [Ynesfdaq?] y
+ record this change to 'plain'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg tip -p
@@ -660,7 +681,8 @@
7
8
9
- record change 1/2 to 'plain'? [Ynesfdaq?] n
+ record change 1/2 to 'plain'?
+ (enter ? for help) [Ynesfdaq?] n
@@ -1,7 +4,6 @@
4
@@ -670,7 +692,8 @@
8
9
-10.new
- record change 2/2 to 'plain'? [Ynesfdaq?] y
+ record change 2/2 to 'plain'?
+ (enter ? for help) [Ynesfdaq?] y
Add to beginning, middle, end
@@ -695,7 +718,8 @@
+3
4
5
- record change 1/3 to 'plain'? [Ynesfdaq?] y
+ record change 1/3 to 'plain'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,6 +4,8 @@
4
@@ -706,7 +730,8 @@
7
8
9
- record change 2/3 to 'plain'? [Ynesfdaq?] y
+ record change 2/3 to 'plain'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -3,4 +8,6 @@
6
@@ -715,7 +740,8 @@
9
+10
+11
- record change 3/3 to 'plain'? [Ynesfdaq?] n
+ record change 3/3 to 'plain'?
+ (enter ? for help) [Ynesfdaq?] n
$ hg tip -p
@@ -755,7 +781,8 @@
9
+10
+11
- record this change to 'plain'? [Ynesfdaq?] y
+ record this change to 'plain'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg tip -p
@@ -786,11 +813,13 @@
adding subdir/a
diff --git a/subdir/a b/subdir/a
new file mode 100644
- examine changes to 'subdir/a'? [Ynesfdaq?] y
+ examine changes to 'subdir/a'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -0,0 +1,1 @@
+a
- record this change to 'subdir/a'? [Ynesfdaq?] y
+ record this change to 'subdir/a'?
+ (enter ? for help) [Ynesfdaq?] y
$ cd subdir
@@ -804,7 +833,8 @@
@@ -1,1 +1,2 @@
a
+a
- record this change to 'subdir/a'? [Ynesfdaq?] y
+ record this change to 'subdir/a'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg tip -p
@@ -839,7 +869,8 @@
> EOF
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
- examine changes to 'subdir/f1'? [Ynesfdaq?] ?
+ examine changes to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] ?
y - yes, record this change
n - no, skip this change
@@ -850,7 +881,8 @@
a - record all changes to all remaining files
q - quit, recording no changes
? - ? (display help)
- examine changes to 'subdir/f1'? [Ynesfdaq?] q
+ examine changes to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] q
abort: user quit
[255]
@@ -865,21 +897,25 @@
> EOF
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
- examine changes to 'subdir/f1'? [Ynesfdaq?] y
+ examine changes to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,1 +1,2 @@
a
+a
- record change 1/2 to 'subdir/f1'? [Ynesfdaq?] n
+ record change 1/2 to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] n
diff --git a/subdir/f2 b/subdir/f2
1 hunks, 1 lines changed
- examine changes to 'subdir/f2'? [Ynesfdaq?] y
+ examine changes to 'subdir/f2'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,1 +1,2 @@
b
+b
- record change 2/2 to 'subdir/f2'? [Ynesfdaq?] n
+ record change 2/2 to 'subdir/f2'?
+ (enter ? for help) [Ynesfdaq?] n
no changes to record
[1]
@@ -936,11 +972,13 @@
> EOF
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
- examine changes to 'subdir/f1'? [Ynesfdaq?] s
+ examine changes to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] s
diff --git a/subdir/f2 b/subdir/f2
1 hunks, 1 lines changed
- examine changes to 'subdir/f2'? [Ynesfdaq?] abort: response expected
+ examine changes to 'subdir/f2'?
+ (enter ? for help) [Ynesfdaq?] abort: response expected
[255]
No
@@ -950,11 +988,13 @@
> EOF
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
- examine changes to 'subdir/f1'? [Ynesfdaq?] n
+ examine changes to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] n
diff --git a/subdir/f2 b/subdir/f2
1 hunks, 1 lines changed
- examine changes to 'subdir/f2'? [Ynesfdaq?] abort: response expected
+ examine changes to 'subdir/f2'?
+ (enter ? for help) [Ynesfdaq?] abort: response expected
[255]
f, quit
@@ -965,11 +1005,13 @@
> EOF
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
- examine changes to 'subdir/f1'? [Ynesfdaq?] f
+ examine changes to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] f
diff --git a/subdir/f2 b/subdir/f2
1 hunks, 1 lines changed
- examine changes to 'subdir/f2'? [Ynesfdaq?] q
+ examine changes to 'subdir/f2'?
+ (enter ? for help) [Ynesfdaq?] q
abort: user quit
[255]
@@ -982,11 +1024,13 @@
> EOF
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
- examine changes to 'subdir/f1'? [Ynesfdaq?] s
+ examine changes to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] s
diff --git a/subdir/f2 b/subdir/f2
1 hunks, 1 lines changed
- examine changes to 'subdir/f2'? [Ynesfdaq?] a
+ examine changes to 'subdir/f2'?
+ (enter ? for help) [Ynesfdaq?] a
$ hg tip -p
@@ -1011,7 +1055,8 @@
> EOF
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
- examine changes to 'subdir/f1'? [Ynesfdaq?] f
+ examine changes to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] f
$ hg tip -p
@@ -1044,13 +1089,15 @@
old mode 100644
new mode 100755
1 hunks, 1 lines changed
- examine changes to 'subdir/f1'? [Ynesfdaq?] y
+ examine changes to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,2 +1,3 @@
a
a
+a
- record this change to 'subdir/f1'? [Ynesfdaq?] y
+ record this change to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg tip --config diff.git=True -p
@@ -1081,14 +1128,16 @@
> EOF
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
- examine changes to 'subdir/f1'? [Ynesfdaq?] y
+ examine changes to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,3 +1,4 @@
a
a
a
+b
- record this change to 'subdir/f1'? [Ynesfdaq?] y
+ record this change to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg tip --config diff.git=True -p
@@ -1121,14 +1170,16 @@
old mode 100755
new mode 100644
1 hunks, 1 lines changed
- examine changes to 'subdir/f1'? [Ynesfdaq?] y
+ examine changes to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -2,3 +2,4 @@ a
a
a
b
+c
- record this change to 'subdir/f1'? [Ynesfdaq?] y
+ record this change to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg tip --config diff.git=True -p
@@ -1165,13 +1216,15 @@
> EOF
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
- examine changes to 'subdir/f1'? [Ynesfdaq?] y
+ examine changes to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,2 +1,3 @@
a
a
+a
- record this change to 'subdir/f1'? [Ynesfdaq?] y
+ record this change to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg tip --config diff.git=True -p
@@ -1200,14 +1253,16 @@
> EOF
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
- examine changes to 'subdir/f1'? [Ynesfdaq?] y
+ examine changes to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,3 +1,4 @@
a
a
a
+b
- record this change to 'subdir/f1'? [Ynesfdaq?] y
+ record this change to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg tip --config diff.git=True -p
@@ -1238,14 +1293,16 @@
> EOF
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
- examine changes to 'subdir/f1'? [Ynesfdaq?] y
+ examine changes to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -2,3 +2,4 @@ a
a
a
b
+c
- record this change to 'subdir/f1'? [Ynesfdaq?] y
+ record this change to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg tip --config diff.git=True -p
@@ -1323,7 +1380,8 @@
> EOF
diff --git a/editedfile b/editedfile
1 hunks, 2 lines changed
- examine changes to 'editedfile'? [Ynesfdaq?] y
+ examine changes to 'editedfile'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,3 +1,3 @@
-This is the first line
@@ -1331,7 +1389,8 @@
+This line has changed
+This change will be committed
This is the third line
- record this change to 'editedfile'? [Ynesfdaq?] e
+ record this change to 'editedfile'?
+ (enter ? for help) [Ynesfdaq?] e
$ cat editedfile
This line has changed
@@ -1352,10 +1411,12 @@
> EOF
diff --git a/editedfile b/editedfile
1 hunks, 1 lines changed
- examine changes to 'editedfile'? [Ynesfdaq?] e
+ examine changes to 'editedfile'?
+ (enter ? for help) [Ynesfdaq?] e
cannot edit patch for whole file
- examine changes to 'editedfile'? [Ynesfdaq?] q
+ examine changes to 'editedfile'?
+ (enter ? for help) [Ynesfdaq?] q
abort: user quit
[255]
@@ -1376,7 +1437,8 @@
> EOF
diff --git a/editedfile b/editedfile
1 hunks, 3 lines changed
- examine changes to 'editedfile'? [Ynesfdaq?] y
+ examine changes to 'editedfile'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,3 +1,3 @@
-This is the first line
@@ -1385,7 +1447,8 @@
+This change will not be committed
+This is the second line
+This line has been added
- record this change to 'editedfile'? [Ynesfdaq?] e
+ record this change to 'editedfile'?
+ (enter ? for help) [Ynesfdaq?] e
no changes to record
[1]
@@ -1414,7 +1477,8 @@
> EOF
diff --git a/editedfile b/editedfile
1 hunks, 3 lines changed
- examine changes to 'editedfile'? [Ynesfdaq?] y
+ examine changes to 'editedfile'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,3 +1,3 @@
-This is the first line
@@ -1423,7 +1487,8 @@
+This change will not be committed
+This is the second line
+This line has been added
- record this change to 'editedfile'? [Ynesfdaq?] e
+ record this change to 'editedfile'?
+ (enter ? for help) [Ynesfdaq?] e
patching file editedfile
Hunk #1 FAILED at 0
@@ -1461,7 +1526,8 @@
> EOF
diff --git a/editedfile b/editedfile
1 hunks, 3 lines changed
- examine changes to 'editedfile'? [Ynesfdaq?] y
+ examine changes to 'editedfile'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,3 +1,3 @@
-This is the first line
@@ -1470,7 +1536,8 @@
+This change will not be committed
+This is the second line
+This line has been added
- record this change to 'editedfile'? [Ynesfdaq?] e
+ record this change to 'editedfile'?
+ (enter ? for help) [Ynesfdaq?] e
abort: error parsing patch: unhandled transition: range -> range
[255]
@@ -1485,7 +1552,8 @@
> EOF
diff --git a/editedfile b/editedfile
1 hunks, 3 lines changed
- examine changes to 'editedfile'? [Ynesfdaq?] y
+ examine changes to 'editedfile'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,3 +1,3 @@
-This is the first line
@@ -1494,10 +1562,12 @@
+This change will not be committed
+This is the second line
+This line has been added
- record this change to 'editedfile'? [Ynesfdaq?] e
+ record this change to 'editedfile'?
+ (enter ? for help) [Ynesfdaq?] e
editor exited with exit code 1
- record this change to 'editedfile'? [Ynesfdaq?] n
+ record this change to 'editedfile'?
+ (enter ? for help) [Ynesfdaq?] n
no changes to record
[1]
@@ -1516,7 +1586,8 @@
> EOF
diff --git a/editedfile b/editedfile
1 hunks, 3 lines changed
- examine changes to 'editedfile'? [Ynesfdaq?] y
+ examine changes to 'editedfile'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,3 +1,3 @@
-This is the first line
@@ -1525,7 +1596,8 @@
+This change will not be committed
+This is the second line
+This line has been added
- record this change to 'editedfile'? [Ynesfdaq?] e
+ record this change to 'editedfile'?
+ (enter ? for help) [Ynesfdaq?] e
abort: error parsing patch: unhandled transition: file -> other
[255]
@@ -1556,14 +1628,16 @@
> EOF
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
- examine changes to 'subdir/f1'? [Ynesfdaq?] y
+ examine changes to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -3,3 +3,4 @@ a
a
b
c
+d
- record this change to 'subdir/f1'? [Ynesfdaq?] y
+ record this change to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg status -A subdir/f1
@@ -1595,14 +1669,16 @@
> EOF
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
- examine changes to 'subdir/f1'? [Ynesfdaq?] y
+ examine changes to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -4,3 +4,4 @@ a
b
c
d
+e
- record this change to 'subdir/f1'? [Ynesfdaq?] y
+ record this change to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
$ hg status -A subdir/f1
C subdir/f1
@@ -1626,14 +1702,16 @@
rename from plain
rename to plain3
1 hunks, 1 lines changed
- examine changes to 'plain' and 'plain3'? [Ynesfdaq?] y
+ examine changes to 'plain' and 'plain3'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -11,3 +11,4 @@ 8
9
10
11
+somechange
- record this change to 'plain3'? [Ynesfdaq?] y
+ record this change to 'plain3'?
+ (enter ? for help) [Ynesfdaq?] y
The #if execbit block above changes the hash here on some systems
$ hg status -A plain3
@@ -1665,13 +1743,15 @@
> EOF
diff --git a/newfile b/newfile
new file mode 100644
- examine changes to 'newfile'? [Ynesfdaq?] y
+ examine changes to 'newfile'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -0,0 +1,3 @@
+This is the first line
+This is the second line
+This is the third line
- record this change to 'newfile'? [Ynesfdaq?] e
+ record this change to 'newfile'?
+ (enter ? for help) [Ynesfdaq?] e
$ hg cat -r tip newfile
This is the very line
@@ -1696,11 +1776,13 @@
> EOF
diff --git a/folder/bar b/folder/bar
new file mode 100644
- examine changes to 'folder/bar'? [Ynesfdaq?] y
+ examine changes to 'folder/bar'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -0,0 +1,1 @@
+foo
- record this change to 'folder/bar'? [Ynesfdaq?] y
+ record this change to 'folder/bar'?
+ (enter ? for help) [Ynesfdaq?] y
The #if execbit block above changes the hashes here on some systems
$ hg tip -p
@@ -1781,7 +1863,8 @@
> EOF
diff --git a/subdir/f1 b/subdir/f1
2 hunks, 2 lines changed
- examine changes to 'subdir/f1'? [Ynesfdaq?] y
+ examine changes to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,6 +1,6 @@
-a
@@ -1791,7 +1874,8 @@
b
c
d
- record change 1/2 to 'subdir/f1'? [Ynesfdaq?] y
+ record change 1/2 to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -2,6 +2,6 @@
a
@@ -1801,7 +1885,8 @@
d
-e
+E
- record change 2/2 to 'subdir/f1'? [Ynesfdaq?] n
+ record change 2/2 to 'subdir/f1'?
+ (enter ? for help) [Ynesfdaq?] n
$ cat >> .hg/hgrc <<EOF
> [extensions]
@@ -1837,15 +1922,18 @@
$ printf 'y\ny\ny\n' | hg ci -im initial --config commands.commit.interactive.unified=0
diff --git a/foo b/foo
2 hunks, 2 lines changed
- examine changes to 'foo'? [Ynesfdaq?] y
+ examine changes to 'foo'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,0 +2,1 @@ 1
+change1
- record change 1/2 to 'foo'? [Ynesfdaq?] y
+ record change 1/2 to 'foo'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -3,0 +5,1 @@ 3
+change2
- record change 2/2 to 'foo'? [Ynesfdaq?] y
+ record change 2/2 to 'foo'?
+ (enter ? for help) [Ynesfdaq?] y
$ cd $TESTTMP
@@ -1873,14 +1961,16 @@
$ printf 'y\ny\ny\n' | hg ci -im initial --config diff.ignoreblanklines=1
diff --git a/foo b/foo
2 hunks, 2 lines changed
- examine changes to 'foo'? [Ynesfdaq?] y
+ examine changes to 'foo'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,3 +1,4 @@
1
+
2
3
- record change 1/2 to 'foo'? [Ynesfdaq?] y
+ record change 1/2 to 'foo'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -2,4 +3,5 @@
2
@@ -1888,6 +1978,7 @@
+change2
4
5
- record change 2/2 to 'foo'? [Ynesfdaq?] y
+ record change 2/2 to 'foo'?
+ (enter ? for help) [Ynesfdaq?] y
--- a/tests/test-commit-unresolved.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-commit-unresolved.t Mon Jul 22 14:00:33 2019 -0400
@@ -1,3 +1,11 @@
+#testcases abortcommand abortflag
+#if abortflag
+ $ cat >> $HGRCPATH <<EOF
+ > [alias]
+ > abort = merge --abort
+ > EOF
+#endif
+
$ addcommit () {
> echo $1 > $1
> hg add $1
@@ -36,9 +44,11 @@
Testing the abort functionality first in case of conflicts
- $ hg merge --abort
- abort: no merge in progress
+ $ hg abort
+ abort: no merge in progress (abortflag !)
+ abort: no operation in progress (abortcommand !)
[255]
+
$ hg merge
merging A
warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
@@ -53,7 +63,13 @@
abort: cannot specify both --rev and --abort
[255]
- $ hg merge --abort
+#if abortcommand
+when in dry-run mode
+ $ hg abort --dry-run
+ merge in progress, will be aborted
+#endif
+
+ $ hg abort
aborting the merge, updating back to e45016d2b3d3
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
@@ -131,7 +147,7 @@
abort: cannot specify --preview with --abort
[255]
- $ hg merge --abort
+ $ hg abort
aborting the merge, updating back to 68352a18a7c4
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
--- a/tests/test-commit.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-commit.t Mon Jul 22 14:00:33 2019 -0400
@@ -708,6 +708,8 @@
abort: path contains illegal component: HG8B6C~2/hgrc
[255]
+ $ cd ..
+
# test that an unmodified commit template message aborts
$ hg init unmodified_commit_template
@@ -734,6 +736,8 @@
abort: commit message unchanged
[255]
+ $ cd ..
+
test that text below the --- >8 --- special string is ignored
$ cat <<'EOF' > $TESTTMP/lowercaseline.sh
@@ -834,3 +838,42 @@
second line
$ cd ..
+
+testing commands.commit.post-status config option
+
+ $ hg init ci-post-st
+ $ cd ci-post-st
+ $ echo '[commands]' > .hg/hgrc
+ $ echo 'commit.post-status = 1' >> .hg/hgrc
+
+ $ echo 'ignored-file' > .hgignore
+ $ hg ci -qAm 0
+
+ $ echo 'c' > clean-file
+ $ echo 'a' > added-file
+ $ echo '?' > unknown-file
+ $ echo 'i' > ignored-file
+ $ hg add clean-file added-file
+ $ hg ci -m 1 clean-file
+ A added-file
+ ? unknown-file
+ $ hg st -mardu
+ A added-file
+ ? unknown-file
+
+ $ touch modified-file
+ $ hg add modified-file
+ $ hg ci -m 2 modified-file -q
+
+ $ echo 'm' > modified-file
+ $ hg ci --amend -m 'reworded' -X 're:'
+ saved backup bundle to $TESTTMP/ci-post-st/.hg/strip-backup/*-amend.hg (glob)
+ M modified-file
+ A added-file
+ ? unknown-file
+ $ hg st -mardu
+ M modified-file
+ A added-file
+ ? unknown-file
+
+ $ cd ..
--- a/tests/test-completion.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-completion.t Mon Jul 22 14:00:33 2019 -0400
@@ -1,5 +1,6 @@
Show all commands except debug commands
$ hg debugcomplete
+ abort
add
addremove
annotate
@@ -14,6 +15,7 @@
clone
commit
config
+ continue
copy
diff
export
@@ -45,18 +47,21 @@
rollback
root
serve
+ shelve
status
summary
tag
tags
tip
unbundle
+ unshelve
update
verify
version
Show all commands that start with "a"
$ hg debugcomplete a
+ abort
add
addremove
annotate
@@ -228,11 +233,12 @@
Show an error if we use --options with an ambiguous abbreviation
$ hg debugcomplete --options s
hg: command 's' is ambiguous:
- serve showconfig status summary
+ serve shelve showconfig status summary
[255]
Show all commands + options
$ hg debugcommands
+ abort: dry-run
add: include, exclude, subrepos, dry-run
addremove: similarity, subrepos, include, exclude, dry-run
annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
@@ -245,8 +251,9 @@
bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
cat: output, rev, decode, include, exclude, template
clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
- commit: addremove, close-branch, amend, secret, edit, interactive, include, exclude, message, logfile, date, user, subrepos
+ commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
config: untrusted, edit, local, global, template
+ continue: dry-run
copy: after, force, include, exclude, dry-run
debugancestor:
debugapplystreamclonebundle:
@@ -338,16 +345,18 @@
resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template
revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
rollback: dry-run, force
- root:
+ root: template
serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos
+ shelve: addremove, unknown, cleanup, date, delete, edit, keep, list, message, name, patch, interactive, stat, include, exclude
status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
summary: remote
tag: force, local, rev, remove, edit, message, date, user
tags: template
tip: patch, git, style, template
unbundle: update
+ unshelve: abort, continue, interactive, keep, name, tool, date
update: clean, check, merge, date, rev, tool
- verify:
+ verify: full
version: template
$ hg init a
--- a/tests/test-contrib-perf.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-contrib-perf.t Mon Jul 22 14:00:33 2019 -0400
@@ -55,6 +55,13 @@
"presleep"
number of second to wait before any group of runs (default: 1)
+ "pre-run"
+ number of run to perform before starting measurement.
+
+ "profile-benchmark"
+ Enable profiling for the benchmarked section. (The first iteration is
+ benchmarked)
+
"run-limits"
Control the number of runs each benchmark will perform. The option value
should be a list of '<time>-<numberofrun>' pairs. After each run the
@@ -117,6 +124,9 @@
perffncachewrite
(no help text available)
perfheads benchmark the computation of a changelog heads
+ perfhelper-mergecopies
+ find statistics about potential parameters for
+ 'perfmergecopies'
perfhelper-pathcopies
find statistic about potential parameters for the
'perftracecopies'
@@ -134,6 +144,8 @@
usable
perfmergecalculate
(no help text available)
+ perfmergecopies
+ measure runtime of 'copies.mergecopies'
perfmoonwalk benchmark walking the changelog backwards
perfnodelookup
(no help text available)
@@ -327,6 +339,34 @@
}
]
+Test pre-run feature
+--------------------
+
+(perf discovery has some spurious output)
+
+ $ hg perfdiscovery . --config perf.stub=no --config perf.run-limits='0.000000001-1' --config perf.pre-run=0
+ ! wall * comb * user * sys * (best of 1) (glob)
+ searching for changes
+ $ hg perfdiscovery . --config perf.stub=no --config perf.run-limits='0.000000001-1' --config perf.pre-run=1
+ ! wall * comb * user * sys * (best of 1) (glob)
+ searching for changes
+ searching for changes
+ $ hg perfdiscovery . --config perf.stub=no --config perf.run-limits='0.000000001-1' --config perf.pre-run=3
+ ! wall * comb * user * sys * (best of 1) (glob)
+ searching for changes
+ searching for changes
+ searching for changes
+ searching for changes
+
+test profile-benchmark option
+------------------------------
+
+Function to check that statprof ran
+ $ statprofran () {
+ > egrep 'Sample count:|No samples recorded' > /dev/null
+ > }
+ $ hg perfdiscovery . --config perf.stub=no --config perf.run-limits='0.000000001-1' --config perf.profile-benchmark=yes 2>&1 | statprofran
+
Check perf.py for historical portability
----------------------------------------
--- a/tests/test-convert-bzr-merges.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-convert-bzr-merges.t Mon Jul 22 14:00:33 2019 -0400
@@ -59,7 +59,7 @@
$ glog -R source-hg
o 5@source "(octopus merge fixup)" files+: [], files-: [], files: [renamed]
|\
- | o 4@source "Merged branches" files+: [file-branch1 file-branch2 renamed], files-: [rename_me], files: [file]
+ | o 4@source "Merged branches" files+: [file-branch2 renamed], files-: [rename_me], files: []
| |\
o---+ 3@source-branch2 "Added brach2 file" files+: [file-branch2 renamed], files-: [rename_me], files: []
/ /
@@ -154,7 +154,7 @@
$ glog -R hg2hg
@ 5@source "(octopus merge fixup)" files+: [], files-: [], files: []
|\
- | o 4@source "Merged branches" files+: [file-branch1 file-branch2 renamed], files-: [rename_me], files: [file]
+ | o 4@source "Merged branches" files+: [file-branch2 renamed], files-: [rename_me], files: []
| |\
o---+ 3@source-branch2 "Added brach2 file" files+: [file-branch2 renamed], files-: [rename_me], files: []
/ /
--- a/tests/test-convert-bzr.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-convert-bzr.t Mon Jul 22 14:00:33 2019 -0400
@@ -147,7 +147,7 @@
1 Editing b
0 Merged improve branch
$ glog -R source-hg
- o 3@source "Merged improve branch" files+: [], files-: [], files: [b]
+ o 3@source "Merged improve branch" files+: [], files-: [], files: []
|\
| o 2@source-improve "Editing b" files+: [], files-: [], files: [b]
| |
--- a/tests/test-convert-filemap.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-convert-filemap.t Mon Jul 22 14:00:33 2019 -0400
@@ -760,9 +760,8 @@
converted/b
x
$ hg -R merge-test2 log -G -T '{shortest(node)} {desc}\n{files % "- {file}\n"}\n'
- o 6eaa merge a & b
+ o e2ff merge a & b
|\ - converted/a
- | | - toberemoved
| |
| o 2995 add b
| | - converted/b
--- a/tests/test-convert-hg-sink.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-convert-hg-sink.t Mon Jul 22 14:00:33 2019 -0400
@@ -573,3 +573,52 @@
M f
A b-only
R a-only
+
+Recorded {files} list does not get confused about flags on merge commits
+
+#if execbit
+ $ cd ..
+ $ hg init merge-flags-orig
+ $ cd merge-flags-orig
+ $ echo 0 > 0
+ $ hg ci -Aqm 'add 0'
+ $ echo a > a
+ $ chmod +x a
+ $ hg ci -qAm 'add executable file'
+ $ hg co -q 0
+ $ echo b > b
+ $ hg ci -qAm 'add file'
+ $ hg merge -q
+ $ hg ci -m 'merge'
+ $ hg log -G -T '{rev} {desc}\n'
+ @ 3 merge
+ |\
+ | o 2 add file
+ | |
+ o | 1 add executable file
+ |/
+ o 0 add 0
+
+
+# No files changed
+ $ hg log -r 3 -T '{files}\n'
+
+
+ $ cd ..
+ $ hg convert merge-flags-orig merge-flags-new -q
+ $ cd merge-flags-new
+ $ hg log -G -T '{rev} {desc}\n'
+ o 3 merge
+ |\
+ | o 2 add file
+ | |
+ o | 1 add executable file
+ |/
+ o 0 add 0
+
+
+# Still no files
+ $ hg log -r 3 -T '{files}\n'
+
+
+#endif
--- a/tests/test-convert-hg-startrev.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-convert-hg-startrev.t Mon Jul 22 14:00:33 2019 -0400
@@ -54,7 +54,7 @@
$ glog full
o 5 "5: change a" files: a
|
- o 4 "4: merge 2 and 3" files: e f
+ o 4 "4: merge 2 and 3" files: e
|\
| o 3 "3: change a" files: a
| |
@@ -83,7 +83,7 @@
$ glog full
o 5 "5: change a" files: a
|
- o 4 "4: merge 2 and 3" files: e f
+ o 4 "4: merge 2 and 3" files: e
|\
| o 3 "3: change a" files: a
| |
@@ -130,7 +130,7 @@
(It seems like a bug in log that the following doesn't show rev 1.)
$ hg log --follow --copies e
- changeset: 2:82bbac3d2cf4
+ changeset: 2:8d3c3fe67bb7
user: test
date: Thu Jan 01 00:00:04 1970 +0000
summary: 4: merge 2 and 3
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-identity.t Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,40 @@
+Testing that convert.hg.preserve-hash=true can be used to make hg
+convert from hg repo to hg repo preserve hashes, even if the
+computation of the files list in commits change slightly between hg
+versions.
+
+ $ cat <<'EOF' >> "$HGRCPATH"
+ > [extensions]
+ > convert =
+ > EOF
+ $ cat <<'EOF' > changefileslist.py
+ > from mercurial import (changelog, extensions)
+ > def wrap(orig, clog, manifest, files, *args, **kwargs):
+ > return orig(clog, manifest, [b"a"], *args, **kwargs)
+ > def extsetup(ui):
+ > extensions.wrapfunction(changelog.changelog, 'add', wrap)
+ > EOF
+
+ $ hg init repo
+ $ cd repo
+ $ echo a > a; hg commit -qAm a
+ $ echo b > a; hg commit -qAm b
+ $ hg up -qr 0; echo c > c; hg commit -qAm c
+ $ hg merge -qr 1
+ $ hg commit -m_ --config extensions.x=../changefileslist.py
+ $ hg log -r . -T '{node|short} {files|json}\n'
+ c085bbe93d59 ["a"]
+
+Now that we have a commit with a files list that's not what the
+current hg version would create, check that convert either fixes it or
+keeps it depending on config:
+
+ $ hg convert -q . ../convert
+ $ hg --cwd ../convert log -r tip -T '{node|short} {files|json}\n'
+ b7c4d4bbacd3 []
+ $ rm -rf ../convert
+
+ $ hg convert -q . ../convert --config convert.hg.preserve-hash=true
+ $ hg --cwd ../convert log -r tip -T '{node|short} {files|json}\n'
+ c085bbe93d59 ["a"]
+ $ rm -rf ../convert
--- a/tests/test-convert.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-convert.t Mon Jul 22 14:00:33 2019 -0400
@@ -373,6 +373,11 @@
records the given string as a 'convert_source' extra value
on each commit made in the target repository. The default is
None.
+ convert.hg.preserve-hash
+ only works with mercurial sources. Make convert prevent
+ performance improvement to the list of modified files in
+ commits when such an improvement would cause the hash of a
+ commit to change. The default is False.
All Destinations
################
--- a/tests/test-copies-in-changeset.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-copies-in-changeset.t Mon Jul 22 14:00:33 2019 -0400
@@ -5,8 +5,11 @@
> copies.read-from=changeset-only
> [alias]
> changesetcopies = log -r . -T 'files: {files}
+ > {extras % "{ifcontains("files", key, "{key}: {value}\n")}"}
> {extras % "{ifcontains("copies", key, "{key}: {value}\n")}"}'
> showcopies = log -r . -T '{file_copies % "{source} -> {name}\n"}'
+ > [extensions]
+ > rebase =
> EOF
Check that copies are recorded correctly
@@ -22,9 +25,13 @@
$ hg ci -m 'copy a to b, c, and d'
$ hg changesetcopies
files: b c d
- p1copies: b\x00a (esc)
- c\x00a (esc)
- d\x00a (esc)
+ filesadded: 0
+ 1
+ 2
+
+ p1copies: 0\x00a (esc)
+ 1\x00a (esc)
+ 2\x00a (esc)
$ hg showcopies
a -> b
a -> c
@@ -41,7 +48,10 @@
$ hg ci -m 'rename b to b2'
$ hg changesetcopies
files: b b2
- p1copies: b2\x00b (esc)
+ filesadded: 1
+ filesremoved: 0
+
+ p1copies: 1\x00b (esc)
$ hg showcopies
b -> b2
@@ -58,7 +68,8 @@
$ hg ci -m 'move b onto d'
$ hg changesetcopies
files: c
- p1copies: c\x00b2 (esc)
+
+ p1copies: 0\x00b2 (esc)
$ hg showcopies
b2 -> c
$ hg debugindex c
@@ -86,9 +97,13 @@
$ hg ci -m 'merge'
$ hg changesetcopies
files: g h i
- p1copies: g\x00a (esc)
- i\x00f (esc)
- p2copies: h\x00d (esc)
+ filesadded: 0
+ 1
+ 2
+
+ p1copies: 0\x00a (esc)
+ 2\x00f (esc)
+ p2copies: 1\x00d (esc)
$ hg showcopies
a -> g
d -> h
@@ -100,7 +115,11 @@
$ hg ci -m 'copy a to j' --config experimental.copies.write-to=compatibility
$ hg changesetcopies
files: j
- p1copies: j\x00a (esc)
+ filesadded: 0
+ filesremoved:
+
+ p1copies: 0\x00a (esc)
+ p2copies:
$ hg debugdata j 0
\x01 (esc)
copy: a
@@ -113,6 +132,17 @@
a -> j
$ hg showcopies --config experimental.copies.read-from=filelog-only
a -> j
+The entries should be written to extras even if they're empty (so the client
+won't have to fall back to reading from filelogs)
+ $ echo x >> j
+ $ hg ci -m 'modify j' --config experimental.copies.write-to=compatibility
+ $ hg changesetcopies
+ files: j
+ filesadded:
+ filesremoved:
+
+ p1copies:
+ p2copies:
Test writing only to filelog
@@ -120,6 +150,7 @@
$ hg ci -m 'copy a to k' --config experimental.copies.write-to=filelog-only
$ hg changesetcopies
files: k
+
$ hg debugdata k 0
\x01 (esc)
copy: a
@@ -133,3 +164,24 @@
a -> k
$ cd ..
+
+Test rebasing a commit with copy information
+
+ $ hg init rebase-rename
+ $ cd rebase-rename
+ $ echo a > a
+ $ hg ci -Aqm 'add a'
+ $ echo a2 > a
+ $ hg ci -m 'modify a'
+ $ hg co -q 0
+ $ hg mv a b
+ $ hg ci -qm 'rename a to b'
+ $ hg rebase -d 1 --config rebase.experimental.inmemory=yes
+ rebasing 2:fc7287ac5b9b "rename a to b" (tip)
+ merging a and b to b
+ saved backup bundle to $TESTTMP/rebase-rename/.hg/strip-backup/fc7287ac5b9b-8f2a95ec-rebase.hg
+ $ hg st --change . --copies
+ A b
+ a
+ R a
+ $ cd ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-copies-unrelated.t Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,389 @@
+#testcases filelog compatibility changeset
+
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > rebase=
+ > [alias]
+ > l = log -G -T '{rev} {desc}\n{files}\n'
+ > EOF
+
+#if compatibility
+ $ cat >> $HGRCPATH << EOF
+ > [experimental]
+ > copies.read-from = compatibility
+ > EOF
+#endif
+
+#if changeset
+ $ cat >> $HGRCPATH << EOF
+ > [experimental]
+ > copies.read-from = changeset-only
+ > copies.write-to = changeset-only
+ > EOF
+#endif
+
+ $ REPONUM=0
+ $ newrepo() {
+ > cd $TESTTMP
+ > REPONUM=`expr $REPONUM + 1`
+ > hg init repo-$REPONUM
+ > cd repo-$REPONUM
+ > }
+
+Copy a file, then delete destination, then copy again. This does not create a new filelog entry.
+ $ newrepo
+ $ echo x > x
+ $ hg ci -Aqm 'add x'
+ $ echo x2 > x
+ $ hg ci -m 'modify x'
+ $ hg co -q 0
+ $ hg cp x y
+ $ hg ci -qm 'copy x to y'
+ $ hg rm y
+ $ hg ci -m 'remove y'
+ $ hg cp -f x y
+ $ hg ci -m 'copy x onto y (again)'
+ $ hg l
+ @ 4 copy x onto y (again)
+ | y
+ o 3 remove y
+ | y
+ o 2 copy x to y
+ | y
+ | o 1 modify x
+ |/ x
+ o 0 add x
+ x
+ $ hg debugp1copies -r 4
+ x -> y
+ $ hg debugpathcopies 0 4
+ x -> y
+ $ hg graft -r 1
+ grafting 1:* "modify x" (glob)
+ merging y and x to y
+ $ hg co -qC 1
+ $ hg graft -r 4
+ grafting 4:* "copy x onto y (again)" (glob)
+ merging x and y to y
+
+Copy x to y, then remove y, then add back y. With copy metadata in the
+changeset, this could easily end up reporting y as copied from x (if we don't
+unmark it as a copy when it's removed). Despite x and y not being related, we
+want grafts to propagate across the rename.
+ $ newrepo
+ $ echo x > x
+ $ hg ci -Aqm 'add x'
+ $ echo x2 > x
+ $ hg ci -m 'modify x'
+ $ hg co -q 0
+ $ hg mv x y
+ $ hg ci -qm 'rename x to y'
+ $ hg rm y
+ $ hg ci -qm 'remove y'
+ $ echo x > y
+ $ hg ci -Aqm 'add back y'
+ $ hg l
+ @ 4 add back y
+ | y
+ o 3 remove y
+ | y
+ o 2 rename x to y
+ | x y
+ | o 1 modify x
+ |/ x
+ o 0 add x
+ x
+ $ hg debugpathcopies 0 4
+BROKEN: This should succeed and merge the changes from x into y
+ $ hg graft -r 1
+ grafting 1:* "modify x" (glob)
+ file 'x' was deleted in local [local] but was modified in other [graft].
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
+ abort: unresolved conflicts, can't continue
+ (use 'hg resolve' and 'hg graft --continue')
+ [255]
+
+Add x, remove it, then add it back, then rename x to y. Similar to the case
+above, but here the break in history is before the rename.
+ $ newrepo
+ $ echo x > x
+ $ hg ci -Aqm 'add x'
+ $ echo x2 > x
+ $ hg ci -m 'modify x'
+ $ hg co -q 0
+ $ hg rm x
+ $ hg ci -qm 'remove x'
+ $ echo x > x
+ $ hg ci -Aqm 'add x again'
+ $ hg mv x y
+ $ hg ci -m 'rename x to y'
+ $ hg l
+ @ 4 rename x to y
+ | x y
+ o 3 add x again
+ | x
+ o 2 remove x
+ | x
+ | o 1 modify x
+ |/ x
+ o 0 add x
+ x
+ $ hg debugpathcopies 0 4
+ x -> y
+ $ hg graft -r 1
+ grafting 1:* "modify x" (glob)
+ merging y and x to y
+ $ hg co -qC 1
+ $ hg graft -r 4
+ grafting 4:* "rename x to y" (glob)
+ merging x and y to y
+
+Add x, modify it, remove it, then add it back, then rename x to y. Similar to
+the case above, but here the re-added file's nodeid is different from before
+the break.
+
+ $ newrepo
+ $ echo x > x
+ $ hg ci -Aqm 'add x'
+ $ echo x2 > x
+ $ hg ci -m 'modify x'
+ $ echo x3 > x
+ $ hg ci -qm 'modify x again'
+ $ hg co -q 1
+ $ hg rm x
+ $ hg ci -qm 'remove x'
+# Same content to avoid conflicts
+ $ hg revert -r 1 x
+ $ hg ci -Aqm 'add x again'
+ $ hg mv x y
+ $ hg ci -m 'rename x to y'
+ $ hg l
+ @ 5 rename x to y
+ | x y
+ o 4 add x again
+ | x
+ o 3 remove x
+ | x
+ | o 2 modify x again
+ |/ x
+ o 1 modify x
+ | x
+ o 0 add x
+ x
+ $ hg debugpathcopies 0 5
+ x -> y (no-filelog !)
+#if no-filelog
+ $ hg graft -r 2
+ grafting 2:* "modify x again" (glob)
+ merging y and x to y
+#else
+BROKEN: This should succeed and merge the changes from x into y
+ $ hg graft -r 2
+ grafting 2:* "modify x again" (glob)
+ file 'x' was deleted in local [local] but was modified in other [graft].
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
+ abort: unresolved conflicts, can't continue
+ (use 'hg resolve' and 'hg graft --continue')
+ [255]
+#endif
+ $ hg co -qC 2
+BROKEN: This should succeed and merge the changes from x into y
+ $ hg graft -r 5
+ grafting 5:* "rename x to y"* (glob)
+ file 'x' was deleted in other [graft] but was modified in local [local].
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
+ abort: unresolved conflicts, can't continue
+ (use 'hg resolve' and 'hg graft --continue')
+ [255]
+
+Add x, remove it, then add it back, rename x to y from the first commit.
+Similar to the case above, but here the break in history is parallel to the
+rename.
+ $ newrepo
+ $ echo x > x
+ $ hg ci -Aqm 'add x'
+ $ hg rm x
+ $ hg ci -qm 'remove x'
+ $ echo x > x
+ $ hg ci -Aqm 'add x again'
+ $ echo x2 > x
+ $ hg ci -m 'modify x'
+ $ hg co -q 0
+ $ hg mv x y
+ $ hg ci -qm 'rename x to y'
+ $ hg l
+ @ 4 rename x to y
+ | x y
+ | o 3 modify x
+ | | x
+ | o 2 add x again
+ | | x
+ | o 1 remove x
+ |/ x
+ o 0 add x
+ x
+ $ hg debugpathcopies 2 4
+ x -> y
+ $ hg graft -r 3
+ grafting 3:* "modify x" (glob)
+ merging y and x to y
+ $ hg co -qC 3
+ $ hg graft -r 4
+ grafting 4:* "rename x to y" (glob)
+ merging x and y to y
+
+Add x, remove it, then add it back, rename x to y from the first commit.
+Similar to the case above, but here the re-added file's nodeid is different
+from the base.
+ $ newrepo
+ $ echo x > x
+ $ hg ci -Aqm 'add x'
+ $ hg rm x
+ $ hg ci -qm 'remove x'
+ $ echo x2 > x
+ $ hg ci -Aqm 'add x again with different content'
+ $ hg co -q 0
+ $ hg mv x y
+ $ hg ci -qm 'rename x to y'
+ $ hg l
+ @ 3 rename x to y
+ | x y
+ | o 2 add x again with different content
+ | | x
+ | o 1 remove x
+ |/ x
+ o 0 add x
+ x
+ $ hg debugpathcopies 2 3
+ x -> y
+BROKEN: This should merge the changes from x into y
+ $ hg graft -r 2
+ grafting 2:* "add x again with different content" (glob)
+ $ hg co -qC 2
+BROKEN: This should succeed and merge the changes from x into y
+ $ hg graft -r 3
+ grafting 3:* "rename x to y" (glob)
+ file 'x' was deleted in other [graft] but was modified in local [local].
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
+ abort: unresolved conflicts, can't continue
+ (use 'hg resolve' and 'hg graft --continue')
+ [255]
+
+Add x on two branches, then rename x to y on one side. Similar to the case
+above, but here the break in history is via the base commit.
+ $ newrepo
+ $ echo a > a
+ $ hg ci -Aqm 'base'
+ $ echo x > x
+ $ hg ci -Aqm 'add x'
+ $ echo x2 > x
+ $ hg ci -m 'modify x'
+ $ hg co -q 0
+ $ echo x > x
+ $ hg ci -Aqm 'add x again'
+ $ hg mv x y
+ $ hg ci -qm 'rename x to y'
+ $ hg l
+ @ 4 rename x to y
+ | x y
+ o 3 add x again
+ | x
+ | o 2 modify x
+ | | x
+ | o 1 add x
+ |/ x
+ o 0 base
+ a
+ $ hg debugpathcopies 1 4
+ x -> y
+ $ hg graft -r 2
+ grafting 2:* "modify x" (glob)
+ merging y and x to y
+ $ hg co -qC 2
+ $ hg graft -r 4
+ grafting 4:* "rename x to y"* (glob)
+ merging x and y to y
+
+Add x on two branches, with same content but different history, then rename x
+to y on one side. Similar to the case above, here the file's nodeid is
+different between the branches.
+ $ newrepo
+ $ echo a > a
+ $ hg ci -Aqm 'base'
+ $ echo x > x
+ $ hg ci -Aqm 'add x'
+ $ echo x2 > x
+ $ hg ci -m 'modify x'
+ $ hg co -q 0
+ $ touch x
+ $ hg ci -Aqm 'add empty x'
+# Same content to avoid conflicts
+ $ hg revert -r 1 x
+ $ hg ci -m 'modify x to match commit 1'
+ $ hg mv x y
+ $ hg ci -qm 'rename x to y'
+ $ hg l
+ @ 5 rename x to y
+ | x y
+ o 4 modify x to match commit 1
+ | x
+ o 3 add empty x
+ | x
+ | o 2 modify x
+ | | x
+ | o 1 add x
+ |/ x
+ o 0 base
+ a
+ $ hg debugpathcopies 1 5
+ x -> y (no-filelog !)
+#if filelog
+BROKEN: This should succeed and merge the changes from x into y
+ $ hg graft -r 2
+ grafting 2:* "modify x" (glob)
+ file 'x' was deleted in local [local] but was modified in other [graft].
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
+ abort: unresolved conflicts, can't continue
+ (use 'hg resolve' and 'hg graft --continue')
+ [255]
+#else
+ $ hg graft -r 2
+ grafting 2:* "modify x" (glob)
+ merging y and x to y
+#endif
+ $ hg co -qC 2
+BROKEN: This should succeed and merge the changes from x into y
+ $ hg graft -r 5
+ grafting 5:* "rename x to y"* (glob)
+ file 'x' was deleted in other [graft] but was modified in local [local].
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
+ abort: unresolved conflicts, can't continue
+ (use 'hg resolve' and 'hg graft --continue')
+ [255]
+
+Copies via null revision (there shouldn't be any)
+ $ newrepo
+ $ echo x > x
+ $ hg ci -Aqm 'add x'
+ $ hg cp x y
+ $ hg ci -m 'copy x to y'
+ $ hg co -q null
+ $ echo x > x
+ $ hg ci -Aqm 'add x (again)'
+ $ hg l
+ @ 2 add x (again)
+ x
+ o 1 copy x to y
+ | y
+ o 0 add x
+ x
+ $ hg debugpathcopies 1 2
+ $ hg debugpathcopies 2 1
+ $ hg graft -r 1
+ grafting 1:* "copy x to y" (glob)
--- a/tests/test-copies.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-copies.t Mon Jul 22 14:00:33 2019 -0400
@@ -96,30 +96,6 @@
Incorrectly doesn't show the rename
$ hg debugpathcopies 0 1
-Copy a file, then delete destination, then copy again. This does not create a new filelog entry.
- $ newrepo
- $ echo x > x
- $ hg ci -Aqm 'add x'
- $ hg cp x y
- $ hg ci -m 'copy x to y'
- $ hg rm y
- $ hg ci -m 'remove y'
- $ hg cp -f x y
- $ hg ci -m 'copy x onto y (again)'
- $ hg l
- @ 3 copy x onto y (again)
- | y
- o 2 remove y
- | y
- o 1 copy x to y
- | y
- o 0 add x
- x
- $ hg debugp1copies -r 3
- x -> y
- $ hg debugpathcopies 0 3
- x -> y
-
Rename file in a loop: x->y->z->x
$ newrepo
$ echo x > x
@@ -144,29 +120,6 @@
x
$ hg debugpathcopies 0 3
-Copy x to y, then remove y, then add back y. With copy metadata in the changeset, this could easily
-end up reporting y as copied from x (if we don't unmark it as a copy when it's removed).
- $ newrepo
- $ echo x > x
- $ hg ci -Aqm 'add x'
- $ hg mv x y
- $ hg ci -m 'rename x to y'
- $ hg rm y
- $ hg ci -qm 'remove y'
- $ echo x > y
- $ hg ci -Aqm 'add back y'
- $ hg l
- @ 3 add back y
- | y
- o 2 remove y
- | y
- o 1 rename x to y
- | x y
- o 0 add x
- x
- $ hg debugp1copies -r 3
- $ hg debugpathcopies 0 3
-
Copy x to z, then remove z, then copy x2 (same content as x) to z. With copy metadata in the
changeset, the two copies here will have the same filelog entry, so ctx['z'].introrev() might point
to the first commit that added the file. We should still report the copy as being from x2.
@@ -234,25 +187,6 @@
x
$ hg debugpathcopies 1 2
-Copies via null revision (there shouldn't be any)
- $ newrepo
- $ echo x > x
- $ hg ci -Aqm 'add x'
- $ hg cp x y
- $ hg ci -m 'copy x to y'
- $ hg co -q null
- $ echo x > x
- $ hg ci -Aqm 'add x (again)'
- $ hg l
- @ 2 add x (again)
- x
- o 1 copy x to y
- | y
- o 0 add x
- x
- $ hg debugpathcopies 1 2
- $ hg debugpathcopies 2 1
-
Merge rename from other branch
$ newrepo
$ echo x > x
@@ -268,7 +202,7 @@
$ hg ci -m 'merge rename from p2'
$ hg l
@ 3 merge rename from p2
- |\ x
+ |\
| o 2 add z
| | z
o | 1 rename x to y
@@ -420,8 +354,7 @@
$ hg debugpathcopies 1 3
x -> z
-Copy x to y on one side of merge, create y and rename to z on the other side. Pathcopies from the
-first side should not include the y->z rename since y didn't exist in the merge base.
+Copy x to y on one side of merge, create y and rename to z on the other side.
$ newrepo
$ echo x > x
$ hg ci -Aqm 'add x'
@@ -451,9 +384,11 @@
$ hg debugpathcopies 2 3
y -> z
$ hg debugpathcopies 1 3
+ y -> z (no-filelog !)
-Create x and y, then rename x to z on one side of merge, and rename y to z and modify z on the
-other side.
+Create x and y, then rename x to z on one side of merge, and rename y to z and
+modify z on the other side. When storing copies in the changeset, we don't
+filter out copies whose target was created on the other side of the merge.
$ newrepo
$ echo x > x
$ echo y > y
@@ -482,9 +417,9 @@
created new head
$ hg l
@ 5 merge 3 into 1
- |\ y z
+ |\ z
+---o 4 merge 1 into 3
- | |/ x z
+ | |/ z
| o 3 modify z
| | z
| o 2 rename y to z
@@ -494,20 +429,24 @@
o 0 add x and y
x y
$ hg debugpathcopies 1 4
+ y -> z (no-filelog !)
$ hg debugpathcopies 2 4
+ x -> z (no-filelog !)
$ hg debugpathcopies 0 4
x -> z (filelog !)
y -> z (compatibility !)
y -> z (changeset !)
$ hg debugpathcopies 1 5
+ y -> z (no-filelog !)
$ hg debugpathcopies 2 5
+ x -> z (no-filelog !)
$ hg debugpathcopies 0 5
x -> z
-Test for a case in fullcopytracing algorithm where both the merging csets are
-"dirty"; where a dirty cset means that cset is descendant of merge base. This
-test reflect that for this particular case this algorithm correctly find the copies:
+Test for a case in fullcopytracing algorithm where neither of the merging csets
+is a descendant of the merge base. This test reflects that the algorithm
+correctly finds the copies:
$ cat >> $HGRCPATH << EOF
> [experimental]
@@ -550,28 +489,25 @@
Grafting revision 4 on top of revision 2, showing that it respect the rename:
-TODO: Make this work with copy info in changesets (probably by writing a
-changeset-centric version of copies.mergecopies())
-#if no-changeset
$ hg up 2 -q
$ hg graft -r 4 --base 3 --hidden
- grafting 4:af28412ec03c "added d, modified b" (tip)
+ grafting 4:af28412ec03c "added d, modified b" (tip) (no-changeset !)
+ grafting 4:6325ca0b7a1c "added d, modified b" (tip) (changeset !)
merging b1 and b to b1
$ hg l -l1 -p
@ 5 added d, modified b
| b1
- ~ diff -r 5a4825cc2926 -r 94a2f1a0e8e2 b1
+ ~ diff -r 5a4825cc2926 -r 94a2f1a0e8e2 b1 (no-changeset !)
+ ~ diff -r 0a0ed3b3251c -r d544fb655520 b1 (changeset !)
--- a/b1 Thu Jan 01 00:00:00 1970 +0000
+++ b/b1 Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +1,2 @@
b
+baba
-#endif
-
-Test to make sure that fullcopytracing algorithm don't fail when both the merging csets are dirty
-(a dirty cset is one who is not the descendant of merge base)
+Test to make sure that fullcopytracing algorithm doesn't fail when neither of the
+merging csets is a descendant of the base.
-------------------------------------------------------------------------------------------------
$ newrepo
@@ -623,7 +559,8 @@
a
$ hg rebase -r . -d 2 -t :other
- rebasing 5:5018b1509e94 "added willconflict and d" (tip)
+ rebasing 5:5018b1509e94 "added willconflict and d" (tip) (no-changeset !)
+ rebasing 5:af8d273bf580 "added willconflict and d" (tip) (changeset !)
$ hg up 3 -q
$ hg l --hidden
@@ -642,8 +579,9 @@
o 0 added a
a
-Now if we trigger a merge between cset revision 3 and 6 using base revision 4, in this case
-both the merging csets will be dirty as no one is descendent of base revision:
+Now if we trigger a merge between revision 3 and 6 using base revision 4,
+neither of the merging csets will be a descendant of the base revision:
$ hg graft -r 6 --base 4 --hidden -t :other
- grafting 6:99802e4f1e46 "added willconflict and d" (tip)
+ grafting 6:99802e4f1e46 "added willconflict and d" (tip) (no-changeset !)
+ grafting 6:b19f0df72728 "added willconflict and d" (tip) (changeset !)
--- a/tests/test-copy-move-merge.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-copy-move-merge.t Mon Jul 22 14:00:33 2019 -0400
@@ -23,7 +23,6 @@
$ hg ci -qAm "other"
$ hg merge --debug
- searching for copies back to rev 1
unmatched files in other:
b
c
@@ -89,8 +88,8 @@
> EOF
rebasing 2:add3f11052fa "other" (tip)
file 'a' was deleted in local [dest] but was modified in other [source].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? c
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? c
$ cat b
1
--- a/tests/test-copytrace-heuristics.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-copytrace-heuristics.t Mon Jul 22 14:00:33 2019 -0400
@@ -15,7 +15,9 @@
$ cat >> $HGRCPATH << EOF
> [extensions]
> rebase=
- > shelve=
+ > [alias]
+ > l = log -G -T 'rev: {rev}\ndesc: {desc}\n'
+ > pl = log -G -T 'rev: {rev}, phase: {phase}\ndesc: {desc}\n'
> EOF
NOTE: calling initclient() set copytrace.sourcecommitlimit=-1 as we want to
@@ -43,13 +45,13 @@
$ echo b > dir/file.txt
$ hg ci -qm 'mod a, mod dir/file.txt'
- $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
- @ changeset: 557f403c0afd2a3cf15d7e2fb1f1001a8b85e081
- | desc: mod a, mod dir/file.txt
- | o changeset: 928d74bc9110681920854d845c06959f6dfc9547
- |/ desc: mv a b, mv dir/ dir2/
- o changeset: 3c482b16e54596fed340d05ffaf155f156cda7ee
- desc: initial
+ $ hg l
+ @ rev: 2
+ | desc: mod a, mod dir/file.txt
+ | o rev: 1
+ |/ desc: mv a b, mv dir/ dir2/
+ o rev: 0
+ desc: initial
$ hg rebase -s . -d 1
rebasing 2:557f403c0afd "mod a, mod dir/file.txt" (tip)
@@ -76,19 +78,19 @@
$ printf 'somecontent\nmoarcontent' > a
$ hg ci -qm 'mode a'
- $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
- @ changeset: d526312210b9e8f795d576a77dc643796384d86e
- | desc: mode a
- | o changeset: 46985f76c7e5e5123433527f5c8526806145650b
- |/ desc: rm a, add b
- o changeset: e5b71fb099c29d9172ef4a23485aaffd497e4cc0
- desc: initial
+ $ hg l
+ @ rev: 2
+ | desc: mode a
+ | o rev: 1
+ |/ desc: rm a, add b
+ o rev: 0
+ desc: initial
$ hg rebase -s . -d 1
rebasing 2:d526312210b9 "mode a" (tip)
file 'a' was deleted in local [dest] but was modified in other [source].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
unresolved conflicts (see hg resolve, then hg rebase --continue)
[1]
@@ -113,15 +115,15 @@
$ echo b > a
$ hg ci -qm 'mod a'
- $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
- @ changeset: 9d5cf99c3d9f8e8b05ba55421f7f56530cfcf3bc
- | desc: mod a, phase: draft
- | o changeset: d760186dd240fc47b91eb9f0b58b0002aaeef95d
- |/ desc: mv a b, phase: draft
- o changeset: 48e1b6ba639d5d7fb313fa7989eebabf99c9eb83
- | desc: randomcommit, phase: draft
- o changeset: e5b71fb099c29d9172ef4a23485aaffd497e4cc0
- desc: initial, phase: draft
+ $ hg pl
+ @ rev: 3, phase: draft
+ | desc: mod a
+ | o rev: 2, phase: draft
+ |/ desc: mv a b
+ o rev: 1, phase: draft
+ | desc: randomcommit
+ o rev: 0, phase: draft
+ desc: initial
$ hg rebase -s . -d 2
rebasing 3:9d5cf99c3d9f "mod a" (tip)
@@ -148,15 +150,15 @@
$ echo b > b
$ hg ci -qm 'mod b'
- $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
- @ changeset: fbe97126b3969056795c462a67d93faf13e4d298
- | desc: mod b
- o changeset: d760186dd240fc47b91eb9f0b58b0002aaeef95d
- | desc: mv a b
- o changeset: 48e1b6ba639d5d7fb313fa7989eebabf99c9eb83
- | desc: randomcommit
- o changeset: e5b71fb099c29d9172ef4a23485aaffd497e4cc0
- desc: initial
+ $ hg l
+ @ rev: 3
+ | desc: mod b
+ o rev: 2
+ | desc: mv a b
+ o rev: 1
+ | desc: randomcommit
+ o rev: 0
+ desc: initial
$ hg rebase -s . -d 0
rebasing 3:fbe97126b396 "mod b" (tip)
@@ -185,15 +187,15 @@
$ echo b > dir/a
$ hg ci -qm 'mod dir/a'
- $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
- @ changeset: 6b2f4cece40fd320f41229f23821256ffc08efea
- | desc: mod dir/a
- | o changeset: 4494bf7efd2e0dfdd388e767fb913a8a3731e3fa
- | | desc: create dir2/a
- | o changeset: b1784dfab6ea6bfafeb11c0ac50a2981b0fe6ade
- |/ desc: mv dir/a dir/b
- o changeset: 36859b8907c513a3a87ae34ba5b1e7eea8c20944
- desc: initial
+ $ hg l
+ @ rev: 3
+ | desc: mod dir/a
+ | o rev: 2
+ | | desc: create dir2/a
+ | o rev: 1
+ |/ desc: mv dir/a dir/b
+ o rev: 0
+ desc: initial
$ hg rebase -s . -d 2
rebasing 3:6b2f4cece40f "mod dir/a" (tip)
@@ -230,13 +232,13 @@
$ hg ci -m 'mod a'
created new head
- $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
- @ changeset: ef716627c70bf4ca0bdb623cfb0d6fe5b9acc51e
- | desc: mod a
- | o changeset: 8329d5c6bf479ec5ca59b9864f3f45d07213f5a4
- |/ desc: mv a foo, add many files
- o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
- desc: initial
+ $ hg l
+ @ rev: 2
+ | desc: mod a
+ | o rev: 1
+ |/ desc: mv a foo, add many files
+ o rev: 0
+ desc: initial
With small limit
@@ -244,8 +246,8 @@
rebasing 2:ef716627c70b "mod a" (tip)
skipping copytracing for 'a', more candidates than the limit: 7
file 'a' was deleted in local [dest] but was modified in other [source].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
unresolved conflicts (see hg resolve, then hg rebase --continue)
[1]
@@ -278,13 +280,13 @@
$ hg ci -m 'del a'
created new head
- $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
- @ changeset: 7d61ee3b1e48577891a072024968428ba465c47b
- | desc: del a, phase: draft
- | o changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
- |/ desc: mv a b, phase: draft
- o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
- desc: initial, phase: draft
+ $ hg pl
+ @ rev: 2, phase: draft
+ | desc: del a
+ | o rev: 1, phase: draft
+ |/ desc: mv a b
+ o rev: 0, phase: draft
+ desc: initial
$ hg rebase -s 1 -d 2
rebasing 1:472e38d57782 "mv a b"
@@ -311,13 +313,13 @@
$ hg mv -q dir/ dir2
$ hg ci -qm 'mv dir/ dir2/'
- $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
- @ changeset: a33d80b6e352591dfd82784e1ad6cdd86b25a239
- | desc: mv dir/ dir2/
- | o changeset: 6b2f4cece40fd320f41229f23821256ffc08efea
- |/ desc: mod dir/a
- o changeset: 36859b8907c513a3a87ae34ba5b1e7eea8c20944
- desc: initial
+ $ hg l
+ @ rev: 2
+ | desc: mv dir/ dir2/
+ | o rev: 1
+ |/ desc: mod dir/a
+ o rev: 0
+ desc: initial
$ hg rebase -s . -d 1
rebasing 2:a33d80b6e352 "mv dir/ dir2/" (tip)
@@ -345,15 +347,15 @@
$ hg ci -m 'mod a'
created new head
- $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
- @ changeset: d413169422167a3fa5275fc5d71f7dea9f5775f3
- | desc: mod a
- | o changeset: d3efd280421d24f9f229997c19e654761c942a71
- | | desc: mv b c
- | o changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
- |/ desc: mv a b
- o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
- desc: initial
+ $ hg l
+ @ rev: 3
+ | desc: mod a
+ | o rev: 2
+ | | desc: mv b c
+ | o rev: 1
+ |/ desc: mv a b
+ o rev: 0
+ desc: initial
$ hg rebase -s . -d 2
rebasing 3:d41316942216 "mod a" (tip)
merging c and a to c
@@ -379,15 +381,15 @@
$ echo c > a
$ hg ci -m 'mod a'
created new head
- $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
- @ changeset: d413169422167a3fa5275fc5d71f7dea9f5775f3
- | desc: mod a
- | o changeset: d3efd280421d24f9f229997c19e654761c942a71
- | | desc: mv b c
- | o changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
- |/ desc: mv a b
- o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
- desc: initial
+ $ hg l
+ @ rev: 3
+ | desc: mod a
+ | o rev: 2
+ | | desc: mv b c
+ | o rev: 1
+ |/ desc: mv a b
+ o rev: 0
+ desc: initial
$ hg rebase -s 1 -d .
rebasing 1:472e38d57782 "mv a b"
merging a and b to b
@@ -417,15 +419,15 @@
$ hg ci -m 'mod a'
created new head
- $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
- @ changeset: ef716627c70bf4ca0bdb623cfb0d6fe5b9acc51e
- | desc: mod a
- | o changeset: b1a6187e79fbce851bb584eadcb0cc4a80290fd9
- | | desc: add c
- | o changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
- |/ desc: mv a b
- o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
- desc: initial
+ $ hg l
+ @ rev: 3
+ | desc: mod a
+ | o rev: 2
+ | | desc: add c
+ | o rev: 1
+ |/ desc: mv a b
+ o rev: 0
+ desc: initial
$ hg rebase -s . -d 2
rebasing 3:ef716627c70b "mod a" (tip)
@@ -455,13 +457,13 @@
created new head
$ hg up -q 2
- $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
- @ changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
- | desc: mv a b
- | o changeset: b0357b07f79129a3d08a68621271ca1352ae8a09
- |/ desc: modify a
- o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
- desc: initial
+ $ hg l
+ @ rev: 2
+ | desc: mv a b
+ | o rev: 1
+ |/ desc: modify a
+ o rev: 0
+ desc: initial
$ hg merge 1
merging b and a to b
@@ -490,13 +492,13 @@
$ hg ci -m 'mod a'
created new head
- $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
- @ changeset: ef716627c70bf4ca0bdb623cfb0d6fe5b9acc51e
- | desc: mod a
- | o changeset: 4fc3fd13fbdb89ada6b75bfcef3911a689a0dde8
- |/ desc: cp a c, mv a b
- o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
- desc: initial
+ $ hg l
+ @ rev: 2
+ | desc: mod a
+ | o rev: 1
+ |/ desc: cp a c, mv a b
+ o rev: 0
+ desc: initial
$ hg rebase -s . -d 1
rebasing 2:ef716627c70b "mod a" (tip)
@@ -530,32 +532,32 @@
$ hg mv b c
$ hg ci -qm 'mv b c'
$ hg up -q 1
- $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
- o changeset: d3efd280421d24f9f229997c19e654761c942a71
- | desc: mv b c
- o changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
- | desc: mv a b
- | @ changeset: ef716627c70bf4ca0bdb623cfb0d6fe5b9acc51e
- |/ desc: mod a
- o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
- desc: initial
+ $ hg l
+ o rev: 3
+ | desc: mv b c
+ o rev: 2
+ | desc: mv a b
+ | @ rev: 1
+ |/ desc: mod a
+ o rev: 0
+ desc: initial
$ hg merge 3
merging a and c to c
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
$ hg ci -qm 'merge'
- $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
- @ changeset: cd29b0d08c0f39bfed4cde1b40e30f419db0c825
- |\ desc: merge, phase: draft
- | o changeset: d3efd280421d24f9f229997c19e654761c942a71
- | | desc: mv b c, phase: draft
- | o changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
- | | desc: mv a b, phase: draft
- o | changeset: ef716627c70bf4ca0bdb623cfb0d6fe5b9acc51e
- |/ desc: mod a, phase: draft
- o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
- desc: initial, phase: draft
+ $ hg pl
+ @ rev: 4, phase: draft
+ |\ desc: merge
+ | o rev: 3, phase: draft
+ | | desc: mv b c
+ | o rev: 2, phase: draft
+ | | desc: mv a b
+ o | rev: 1, phase: draft
+ |/ desc: mod a
+ o rev: 0, phase: draft
+ desc: initial
$ ls
c
$ cd ..
@@ -577,11 +579,11 @@
$ hg mv a b
$ hg ci -m 'mv a b'
- $ hg log -G -T 'changeset: {node}\n desc: {desc}\n'
- @ changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
- | desc: mv a b
- o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
- desc: initial
+ $ hg l
+ @ rev: 1
+ | desc: mv a b
+ o rev: 0
+ desc: initial
$ hg unshelve
unshelving change 'default'
rebasing shelved changes
@@ -614,13 +616,13 @@
$ cd ..
$ hg ci -qm 'mod a'
- $ hg log -G -T 'changeset {node}\n desc {desc}, phase: {phase}\n'
- @ changeset 6207d2d318e710b882e3d5ada2a89770efc42c96
- | desc mod a, phase: draft
- | o changeset abffdd4e3dfc04bc375034b970299b2a309a1cce
- |/ desc mv a b; mv dir1 dir2, phase: draft
- o changeset 81973cd24b58db2fdf18ce3d64fb2cc3284e9ab3
- desc initial, phase: draft
+ $ hg pl
+ @ rev: 2, phase: draft
+ | desc: mod a
+ | o rev: 1, phase: draft
+ |/ desc: mv a b; mv dir1 dir2
+ o rev: 0, phase: draft
+ desc: initial
$ hg rebase -s . -d 1 --config experimental.copytrace.sourcecommitlimit=100
rebasing 2:6207d2d318e7 "mod a" (tip)
@@ -652,13 +654,13 @@
$ hg mv -q dir1 dir2
$ hg ci -qm 'mv dir1 dir2'
- $ hg log -G -T 'changeset {node}\n desc {desc}, phase: {phase}\n'
- @ changeset e8919e7df8d036e07b906045eddcd4a42ff1915f
- | desc mv dir1 dir2, phase: draft
- | o changeset 7c7c6f339be00f849c3cb2df738ca91db78b32c8
- |/ desc hg add dir1/a, phase: draft
- o changeset a235dcce55dcf42034c4e374cb200662d0bb4a13
- desc initial, phase: draft
+ $ hg pl
+ @ rev: 2, phase: draft
+ | desc: mv dir1 dir2
+ | o rev: 1, phase: draft
+ |/ desc: hg add dir1/a
+ o rev: 0, phase: draft
+ desc: initial
$ hg rebase -s . -d 1 --config experimental.copytrace.sourcecommitlimit=100
rebasing 2:e8919e7df8d0 "mv dir1 dir2" (tip)
@@ -685,23 +687,23 @@
$ mkdir foo
$ hg mv a foo/bar
$ hg ci -m "Moved a to foo/bar"
- $ hg log -G -T 'changeset {node}\n desc {desc}, phase: {phase}\n'
- @ changeset b4b0f7880e500b5c364a5f07b4a2b167de7a6fb0
- | desc Moved a to foo/bar, phase: draft
- o changeset 5f6d8a4bf34ab274ccc9f631c2536964b8a3666d
- | desc added b, phase: draft
- | o changeset 8b6e13696c38e8445a759516474640c2f8dddef6
- |/ desc added more things to a, phase: draft
- o changeset 9092f1db7931481f93b37d5c9fbcfc341bcd7318
- desc added a, phase: draft
+ $ hg pl
+ @ rev: 3, phase: draft
+ | desc: Moved a to foo/bar
+ o rev: 2, phase: draft
+ | desc: added b
+ | o rev: 1, phase: draft
+ |/ desc: added more things to a
+ o rev: 0, phase: draft
+ desc: added a
When the sourcecommitlimit is small and we have more drafts, we use heuristics only
- $ hg rebase -s 8b6e13696 -d .
+ $ hg rebase -s 1 -d .
rebasing 1:8b6e13696c38 "added more things to a"
file 'a' was deleted in local [dest] but was modified in other [source].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
unresolved conflicts (see hg resolve, then hg rebase --continue)
[1]
@@ -710,7 +712,7 @@
$ hg rebase --abort
rebase aborted
- $ hg rebase -s 8b6e13696 -d . --config experimental.copytrace.sourcecommitlimit=100
+ $ hg rebase -s 1 -d . --config experimental.copytrace.sourcecommitlimit=100
rebasing 1:8b6e13696c38 "added more things to a"
merging foo/bar and a to foo/bar
saved backup bundle to $TESTTMP/repo/repo/repo/.hg/strip-backup/8b6e13696c38-fc14ac83-rebase.hg
--- a/tests/test-debugcommands.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-debugcommands.t Mon Jul 22 14:00:33 2019 -0400
@@ -545,6 +545,7 @@
.hg/cache/tags2
.hg/cache/rbc-revs-v1
.hg/cache/rbc-names-v1
+ .hg/cache/hgtagsfnodes1
.hg/cache/branch2-served
Test debugcolor
--- a/tests/test-diff-color.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-diff-color.t Mon Jul 22 14:00:33 2019 -0400
@@ -171,7 +171,8 @@
a
a
c
- \x1b[0;33mrecord this change to 'a'? [Ynesfdaq?]\x1b[0m y (esc)
+ \x1b[0;33mrecord this change to 'a'?\x1b[0m (esc)
+ \x1b[0;33m(enter ? for help) [Ynesfdaq?]\x1b[0m y (esc)
$ echo "[extensions]" >> $HGRCPATH
@@ -190,7 +191,8 @@
\x1b[0;36;1mold mode 100644\x1b[0m (esc)
\x1b[0;36;1mnew mode 100755\x1b[0m (esc)
1 hunks, 1 lines changed
- \x1b[0;33mexamine changes to 'a'? [Ynesfdaq?]\x1b[0m y (esc)
+ \x1b[0;33mexamine changes to 'a'?\x1b[0m (esc)
+ \x1b[0;33m(enter ? for help) [Ynesfdaq?]\x1b[0m y (esc)
\x1b[0;35m@@ -2,7 +2,7 @@ c\x1b[0m (esc)
c
@@ -201,7 +203,8 @@
a
a
c
- \x1b[0;33mrecord this change to 'a'? [Ynesfdaq?]\x1b[0m y (esc)
+ \x1b[0;33mrecord this change to 'a'?\x1b[0m (esc)
+ \x1b[0;33m(enter ? for help) [Ynesfdaq?]\x1b[0m y (esc)
$ hg qpop -a
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-dirstate-race2.t Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,37 @@
+Checking the size/permissions/file-type of files stored in the
+dirstate after an update where the files are changed concurrently
+outside of hg's control.
+
+ $ hg init repo
+ $ cd repo
+ $ echo a > a
+ $ hg commit -qAm _
+ $ echo aa > a
+ $ hg commit -m _
+
+ $ hg debugdirstate --no-dates
+ n 644 3 (set |unset) a (re)
+
+ $ cat >> $TESTTMP/dirstaterace.py << EOF
+ > from mercurial import (
+ > extensions,
+ > merge,
+ > )
+ > def extsetup(ui):
+ > extensions.wrapfunction(merge, 'applyupdates', wrap)
+ > def wrap(orig, *args, **kwargs):
+ > res = orig(*args, **kwargs)
+ > with open("a", "w"):
+ > pass # just truncate the file
+ > return res
+ > EOF
+
+Do an update where file 'a' is changed between hg writing it to disk
+and hg writing the dirstate. The dirstate is correct nonetheless, and
+so hg status correctly shows a as clean.
+
+ $ hg up -r 0 --config extensions.race=$TESTTMP/dirstaterace.py
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg debugdirstate --no-dates
+ n 644 2 (set |unset) a (re)
+ $ echo a > a; hg status; hg diff
--- a/tests/test-dirstate.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-dirstate.t Mon Jul 22 14:00:33 2019 -0400
@@ -73,7 +73,7 @@
> merge,
> )
>
- > def wraprecordupdates(orig, repo, actions, branchmerge):
+ > def wraprecordupdates(*args):
> raise error.Abort("simulated error while recording dirstateupdates")
>
> def reposetup(ui, repo):
--- a/tests/test-double-merge.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-double-merge.t Mon Jul 22 14:00:33 2019 -0400
@@ -26,7 +26,6 @@
summary: cp foo bar; change both
$ hg merge --debug
- searching for copies back to rev 1
unmatched files in other:
bar
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
--- a/tests/test-editor-filename.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-editor-filename.t Mon Jul 22 14:00:33 2019 -0400
@@ -47,15 +47,18 @@
> EOF
diff --git a/one b/one
new file mode 100644
- examine changes to 'one'? [Ynesfdaq?] y
+ examine changes to 'one'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -0,0 +1,1 @@
+1
- record change 1/2 to 'one'? [Ynesfdaq?] e
+ record change 1/2 to 'one'?
+ (enter ? for help) [Ynesfdaq?] e
*.diff (glob)
editor exited with exit code 1
- record change 1/2 to 'one'? [Ynesfdaq?] q
+ record change 1/2 to 'one'?
+ (enter ? for help) [Ynesfdaq?] q
abort: user quit
[255]
--- a/tests/test-extdata.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-extdata.t Mon Jul 22 14:00:33 2019 -0400
@@ -13,6 +13,7 @@
> notes = notes.txt
> shelldata = shell:cat extdata.txt | grep 2
> emptygrep = shell:cat extdata.txt | grep empty
+ > badparse = shell:cat badparse.txt
> EOF
$ cat <<'EOF' > extdata.txt
> 2 another comment on 2
@@ -58,6 +59,22 @@
abort: unknown extdata source 'unknown'
[255]
+test a zero-exiting source that emits garbage to confuse the revset parser
+
+ $ cat > badparse.txt <<'EOF'
+ > +---------------------------------------+
+ > 9de260b1e88e
+ > EOF
+
+It might be nice if this error message mentioned where the bad string
+came from (eg line X of extdata source S), but the important thing is
+that we don't crash before we can print the parse error.
+ $ hg log -qr "extdata(badparse)"
+ hg: parse error at 0: not a prefix: +
+ (+---------------------------------------+
+ ^ here)
+ [255]
+
test template support:
$ hg log -r:3 -T "{node|short}{if(extdata('notes'), ' # {extdata('notes')}')}\n"
--- a/tests/test-fastannotate-hg.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-fastannotate-hg.t Mon Jul 22 14:00:33 2019 -0400
@@ -1,6 +1,8 @@
(this file is backported from core hg tests/test-annotate.t)
$ cat >> $HGRCPATH << EOF
+ > [ui]
+ > merge = :merge3
> [diff]
> git=1
> [extensions]
@@ -11,8 +13,6 @@
> mainbranch=.
> EOF
- $ HGMERGE=true; export HGMERGE
-
init
$ hg init repo
@@ -157,8 +157,34 @@
created new head
$ hg merge
merging b
- 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
- (branch merge, don't forget to commit)
+ warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+ [1]
+ $ cat b
+ a
+ a
+ a
+ <<<<<<< working copy: 5fbdc1152d97 - test: b2.1
+ b4
+ c
+ b5
+ ||||||| base
+ =======
+ b4
+ b5
+ b6
+ >>>>>>> merge rev: 37ec9f5c3d1f - test: b2
+ $ cat <<EOF > b
+ > a
+ > a
+ > a
+ > b4
+ > c
+ > b5
+ > EOF
+ $ hg resolve --mark -q
+ $ rm b.orig
$ hg ci -mmergeb -d '3 0'
annotate after merge
@@ -247,15 +273,31 @@
> EOF
$ hg ci -mc -d '3 0'
created new head
+Work around the pure version not resolving the conflict like native code
+#if pure
+ $ hg merge
+ merging b
+ warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+ [1]
+ $ cat <<EOF > b
+ > a
+ > z
+ > a
+ > b4
+ > c
+ > b5
+ > EOF
+ $ hg resolve -m b
+ (no more unresolved files)
+ $ rm b.orig
+#else
$ hg merge
merging b
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
- $ cat <<EOF >> b
- > b4
- > c
- > b5
- > EOF
+#endif
$ echo d >> b
$ hg ci -mmerge2 -d '4 0'
@@ -745,6 +787,19 @@
$ echo 3 >> a
$ hg commit -m 3 -q
$ hg merge 2 -q
+ warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+ [1]
+ $ cat a
+ <<<<<<< working copy: 0a068f0261cf - test: 3
+ 1
+ 2
+ 3
+ ||||||| base
+ 1
+ 2
+ =======
+ a
+ >>>>>>> merge rev: 9409851bc20a - test: a
$ cat > a << EOF
> b
> 1
@@ -753,6 +808,7 @@
> a
> EOF
$ hg resolve --mark -q
+ $ rm a.orig
$ hg commit -m m
$ hg annotate a
4: b
--- a/tests/test-fastannotate-perfhack.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-fastannotate-perfhack.t Mon Jul 22 14:00:33 2019 -0400
@@ -5,8 +5,6 @@
> perfhack=1
> EOF
- $ HGMERGE=true; export HGMERGE
-
$ hg init repo
$ cd repo
--- a/tests/test-fastannotate-protocol.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-fastannotate-protocol.t Mon Jul 22 14:00:33 2019 -0400
@@ -7,8 +7,6 @@
> mainbranch=@
> EOF
- $ HGMERGE=true; export HGMERGE
-
setup the server repo
$ hg init repo-server
--- a/tests/test-fastannotate.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-fastannotate.t Mon Jul 22 14:00:33 2019 -0400
@@ -3,8 +3,6 @@
> fastannotate=
> EOF
- $ HGMERGE=true; export HGMERGE
-
$ hg init repo
$ cd repo
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-fix-metadata.t Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,86 @@
+A python hook for "hg fix" that prints out the number of files and revisions
+that were affected, along with which fixer tools were applied. Also checks how
+many times it sees a specific key generated by one of the fixer tools defined
+below.
+
+ $ cat >> $TESTTMP/postfixhook.py <<EOF
+ > import collections
+ > def file(ui, repo, rev=None, path=b'', metadata=None, **kwargs):
+ > ui.status(b'fixed %s in revision %d using %s\n' %
+ > (path, rev, b', '.join(metadata.keys())))
+ > def summarize(ui, repo, replacements=None, wdirwritten=False,
+ > metadata=None, **kwargs):
+ > counts = collections.defaultdict(int)
+ > keys = 0
+ > for fixername, metadatalist in metadata.items():
+ > for metadata in metadatalist:
+ > if metadata is None:
+ > continue
+ > counts[fixername] += 1
+ > if 'key' in metadata:
+ > keys += 1
+ > ui.status(b'saw "key" %d times\n' % (keys,))
+ > for name, count in sorted(counts.items()):
+ > ui.status(b'fixed %d files with %s\n' % (count, name))
+ > if replacements:
+ > ui.status(b'fixed %d revisions\n' % (len(replacements),))
+ > if wdirwritten:
+ > ui.status(b'fixed the working copy\n')
+ > EOF
+
+Some mock output for fixer tools that demonstrate what could go wrong with
+expecting the metadata output format.
+
+ $ printf 'new content\n' > $TESTTMP/missing
+ $ printf 'not valid json\0new content\n' > $TESTTMP/invalid
+ $ printf '{"key": "value"}\0new content\n' > $TESTTMP/valid
+
+Configure some fixer tools based on the output defined above, and enable the
+hooks defined above. Disable parallelism to make output of the parallel file
+processing phase stable.
+
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > fix =
+ > [fix]
+ > missing:command=cat $TESTTMP/missing
+ > missing:pattern=missing
+ > missing:metadata=true
+ > invalid:command=cat $TESTTMP/invalid
+ > invalid:pattern=invalid
+ > invalid:metadata=true
+ > valid:command=cat $TESTTMP/valid
+ > valid:pattern=valid
+ > valid:metadata=true
+ > [hooks]
+ > postfixfile = python:$TESTTMP/postfixhook.py:file
+ > postfix = python:$TESTTMP/postfixhook.py:summarize
+ > [worker]
+ > enabled=false
+ > EOF
+
+See what happens when we execute each of the fixer tools. Some print warnings,
+some write back to the file.
+
+ $ hg init repo
+ $ cd repo
+
+ $ printf "old content\n" > invalid
+ $ printf "old content\n" > missing
+ $ printf "old content\n" > valid
+ $ hg add -q
+
+ $ hg fix -w
+ ignored invalid output from fixer tool: invalid
+ ignored invalid output from fixer tool: missing
+ fixed valid in revision 2147483647 using valid
+ saw "key" 1 times
+ fixed 1 files with valid
+ fixed the working copy
+
+ $ cat missing invalid valid
+ old content
+ old content
+ new content
+
+ $ cd ..
--- a/tests/test-fix.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-fix.t Mon Jul 22 14:00:33 2019 -0400
@@ -185,6 +185,36 @@
tool may see different values for the arguments added by the :linerange
suboption.
+ Each fixer tool is allowed to return some metadata in addition to the fixed
+ file content. The metadata must be placed before the file content on stdout,
+ separated from the file content by a zero byte. The metadata is parsed as a
+ JSON value (so, it should be UTF-8 encoded and contain no zero bytes). A fixer
+ tool is expected to produce this metadata encoding if and only if the
+ :metadata suboption is true:
+
+ [fix]
+ tool:command = tool --prepend-json-metadata
+ tool:metadata = true
+
+ The metadata values are passed to hooks, which can be used to print summaries
+ or perform other post-fixing work. The supported hooks are:
+
+ "postfixfile"
+ Run once for each file in each revision where any fixer tools made changes
+ to the file content. Provides "$HG_REV" and "$HG_PATH" to identify the file,
+ and "$HG_METADATA" with a map of fixer names to metadata values from fixer
+ tools that affected the file. Fixer tools that didn't affect the file have a
+ valueof None. Only fixer tools that executed are present in the metadata.
+
+ "postfix"
+ Run once after all files and revisions have been handled. Provides
+ "$HG_REPLACEMENTS" with information about what revisions were created and
+ made obsolete. Provides a boolean "$HG_WDIRWRITTEN" to indicate whether any
+ files in the working copy were updated. Provides a list "$HG_METADATA"
+ mapping fixer tool names to lists of metadata values returned from
+ executions that modified a file. This aggregates the same metadata
+ previously passed to the "postfixfile" hook.
+
list of commands:
fix rewrite file content in changesets or working directory
@@ -804,6 +834,8 @@
(use 'hg rebase --continue' or 'hg rebase --abort')
[255]
+ $ cd ..
+
When fixing a file that was renamed, we should diff against the source of the
rename for incremental fixing and we should correctly reproduce the rename in
the replacement revision.
--- a/tests/test-githelp.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-githelp.t Mon Jul 22 14:00:33 2019 -0400
@@ -219,6 +219,22 @@
$ hg githelp -- git stash drop xyz
hg shelve -d xyz
+githelp for stash list with patch
+ $ hg githelp -- git stash list -p
+ hg shelve -l -p
+
+githelp for stash show
+ $ hg githelp -- git stash show
+ hg shelve --stat
+
+githelp for stash show with patch and name
+ $ hg githelp -- git stash show -p mystash
+ hg shelve -p mystash
+
+githelp for stash clear
+ $ hg githelp -- git stash clear
+ hg shelve --cleanup
+
githelp for whatchanged should show deprecated message
$ hg githelp -- whatchanged -p
this command has been deprecated in the git project, thus isn't supported by this tool
@@ -256,6 +272,10 @@
$ hg githelp -- apply -p 5
hg import --no-commit -p 5
+githelp for apply with prefix directory
+ $ hg githelp -- apply --directory=modules
+ hg import --no-commit --prefix modules
+
git merge-base
$ hg githelp -- git merge-base --is-ancestor
ignoring unknown option --is-ancestor
--- a/tests/test-globalopts.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-globalopts.t Mon Jul 22 14:00:33 2019 -0400
@@ -356,8 +356,10 @@
resolve redo merges or set/view the merge status of files
revert restore files to their checkout state
root print the root (top) of the current working directory
+ shelve save and set aside changes from the working directory
status show changed files in the working directory
summary summarize working directory state
+ unshelve restore a shelved change to the working directory
update update working directory (or switch revisions)
Change import/export:
@@ -486,8 +488,10 @@
resolve redo merges or set/view the merge status of files
revert restore files to their checkout state
root print the root (top) of the current working directory
+ shelve save and set aside changes from the working directory
status show changed files in the working directory
summary summarize working directory state
+ unshelve restore a shelved change to the working directory
update update working directory (or switch revisions)
Change import/export:
--- a/tests/test-glog-beautifygraph.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-glog-beautifygraph.t Mon Jul 22 14:00:33 2019 -0400
@@ -2101,7 +2101,7 @@
$ hg up -q 6
$ hg log -G --git --patch --follow-first e
- \xe2\x97\x8d changeset: 6:fc281d8ff18d (esc)
+ \xe2\x97\x8d changeset: 6:9feeac35a70a (esc)
\xe2\x94\x82\xe2\x95\xb2 tag: tip (esc)
\xe2\x94\x82 \xe2\x95\xa7 parent: 5:99b31f1c2782 (esc)
\xe2\x94\x82 parent: 4:17d952250a9d (esc)
@@ -2148,7 +2148,7 @@
$ hg log -G --template "{rev} {desc|firstline}\n"
\xe2\x97\x8b 8 add g (esc)
\xe2\x94\x82 (esc)
- \xe2\x94\x82 \xe2\x97\x8b 7 Added tag foo-bar for changeset fc281d8ff18d (esc)
+ \xe2\x94\x82 \xe2\x97\x8b 7 Added tag foo-bar for changeset 9feeac35a70a (esc)
\xe2\x94\x82\xe2\x95\xb1 (esc)
\xe2\x97\x8b 6 merge 5 and 4 (esc)
\xe2\x94\x82\xe2\x95\xb2 (esc)
@@ -2311,17 +2311,17 @@
# User test
# Date 0 0
# Thu Jan 01 00:00:00 1970 +0000
- # Node ID fc281d8ff18d999ad6497b3d27390bcd695dcc73
+ # Node ID 9feeac35a70aa325519bbf3178683271113f2b8f
# Parent 99b31f1c2782e2deb1723cef08930f70fc84b37b
# Parent 17d952250a9d03cc3dc77b199ab60e959b9b0260
merge 5 and 4
- diff -r 99b31f1c2782 -r fc281d8ff18d dir/b
+ diff -r 99b31f1c2782 -r 9feeac35a70a dir/b
--- a/dir/b Thu Jan 01 00:00:00 1970 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-a
- diff -r 99b31f1c2782 -r fc281d8ff18d e
+ diff -r 99b31f1c2782 -r 9feeac35a70a e
--- a/e Thu Jan 01 00:00:00 1970 +0000
+++ b/e Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +1,1 @@
@@ -2331,24 +2331,24 @@
# User test
# Date 0 0
# Thu Jan 01 00:00:00 1970 +0000
- # Node ID 02dbb8e276b8ab7abfd07cab50c901647e75c2dd
- # Parent fc281d8ff18d999ad6497b3d27390bcd695dcc73
- Added tag foo-bar for changeset fc281d8ff18d
+ # Node ID 9febbb9c8b2e09670a2fb550cb1e4e01a2c7e9fd
+ # Parent 9feeac35a70aa325519bbf3178683271113f2b8f
+ Added tag foo-bar for changeset 9feeac35a70a
- diff -r fc281d8ff18d -r 02dbb8e276b8 .hgtags
+ diff -r 9feeac35a70a -r 9febbb9c8b2e .hgtags
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000
@@ -0,0 +1,1 @@
- +fc281d8ff18d999ad6497b3d27390bcd695dcc73 foo-bar
+ +9feeac35a70aa325519bbf3178683271113f2b8f foo-bar
# HG changeset patch
# User test
# Date 0 0
# Thu Jan 01 00:00:00 1970 +0000
- # Node ID 24c2e826ddebf80f9dcd60b856bdb8e6715c5449
- # Parent fc281d8ff18d999ad6497b3d27390bcd695dcc73
+ # Node ID 3bd4551ec3fe1c0696241f236abe857a53c6d6e7
+ # Parent 9feeac35a70aa325519bbf3178683271113f2b8f
add g
- diff -r fc281d8ff18d -r 24c2e826ddeb g
+ diff -r 9feeac35a70a -r 3bd4551ec3fe g
--- a/g Thu Jan 01 00:00:00 1970 +0000
+++ b/g Thu Jan 01 00:00:00 1970 +0000
@@ -1,2 +1,1 @@
@@ -2436,7 +2436,7 @@
[]
<spanset- 0:9>
$ hg log -G --template '{rev} {desc}\n'
- \xe2\x97\x8b 7 Added tag foo-bar for changeset fc281d8ff18d (esc)
+ \xe2\x97\x8b 7 Added tag foo-bar for changeset 9feeac35a70a (esc)
\xe2\x94\x82 (esc)
\xe2\x97\x8b 6 merge 5 and 4 (esc)
\xe2\x94\x82\xe2\x95\xb2 (esc)
@@ -2534,9 +2534,9 @@
node template with changesetprinter:
$ hg log -Gqr 5:7 --config ui.graphnodetemplate='"{rev}"'
- 7 7:02dbb8e276b8
+ 7 7:9febbb9c8b2e
\xe2\x94\x82 (esc)
- 6 6:fc281d8ff18d
+ 6 6:9feeac35a70a
\xe2\x94\x82\xe2\x95\xb2 (esc)
\xe2\x94\x82 \xe2\x95\xa7 (esc)
5 5:99b31f1c2782
@@ -2560,7 +2560,7 @@
$ hg log -Gqr 7 --config extensions.color= --color=debug \
> --config ui.graphnodetemplate='{label("branch.{branch}", rev)}'
- [branch.default\xe2\x94\x827] [log.node|7:02dbb8e276b8] (esc)
+ [branch.default\xe2\x94\x827] [log.node|7:9febbb9c8b2e] (esc)
\xe2\x94\x82 (esc)
\xe2\x95\xa7 (esc)
--- a/tests/test-glog-topological.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-glog-topological.t Mon Jul 22 14:00:33 2019 -0400
@@ -114,3 +114,41 @@
|/
o 0
+
+Topological sort can be turned on via config
+
+ $ cat >> $HGRCPATH << EOF
+ > [experimental]
+ > log.topo=true
+ > EOF
+
+ $ hg log -G
+ o 8
+ |
+ o 3
+ |
+ o 2
+ |
+ o 1
+ |
+ | o 7
+ | |
+ | o 6
+ | |
+ | o 5
+ | |
+ | o 4
+ |/
+ o 0
+
+Does not affect non-graph log
+ $ hg log -T '{rev}\n'
+ 8
+ 7
+ 6
+ 5
+ 4
+ 3
+ 2
+ 1
+ 0
--- a/tests/test-glog.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-glog.t Mon Jul 22 14:00:33 2019 -0400
@@ -1951,7 +1951,7 @@
$ hg up -q 6
$ hg log -G --git --patch --follow-first e
- @ changeset: 6:fc281d8ff18d
+ @ changeset: 6:9feeac35a70a
|\ tag: tip
| ~ parent: 5:99b31f1c2782
| parent: 4:17d952250a9d
@@ -1998,7 +1998,7 @@
$ hg log -G --template "{rev} {desc|firstline}\n"
o 8 add g
|
- | o 7 Added tag foo-bar for changeset fc281d8ff18d
+ | o 7 Added tag foo-bar for changeset 9feeac35a70a
|/
o 6 merge 5 and 4
|\
@@ -2161,17 +2161,17 @@
# User test
# Date 0 0
# Thu Jan 01 00:00:00 1970 +0000
- # Node ID fc281d8ff18d999ad6497b3d27390bcd695dcc73
+ # Node ID 9feeac35a70aa325519bbf3178683271113f2b8f
# Parent 99b31f1c2782e2deb1723cef08930f70fc84b37b
# Parent 17d952250a9d03cc3dc77b199ab60e959b9b0260
merge 5 and 4
- diff -r 99b31f1c2782 -r fc281d8ff18d dir/b
+ diff -r 99b31f1c2782 -r 9feeac35a70a dir/b
--- a/dir/b Thu Jan 01 00:00:00 1970 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-a
- diff -r 99b31f1c2782 -r fc281d8ff18d e
+ diff -r 99b31f1c2782 -r 9feeac35a70a e
--- a/e Thu Jan 01 00:00:00 1970 +0000
+++ b/e Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +1,1 @@
@@ -2181,24 +2181,24 @@
# User test
# Date 0 0
# Thu Jan 01 00:00:00 1970 +0000
- # Node ID 02dbb8e276b8ab7abfd07cab50c901647e75c2dd
- # Parent fc281d8ff18d999ad6497b3d27390bcd695dcc73
- Added tag foo-bar for changeset fc281d8ff18d
+ # Node ID 9febbb9c8b2e09670a2fb550cb1e4e01a2c7e9fd
+ # Parent 9feeac35a70aa325519bbf3178683271113f2b8f
+ Added tag foo-bar for changeset 9feeac35a70a
- diff -r fc281d8ff18d -r 02dbb8e276b8 .hgtags
+ diff -r 9feeac35a70a -r 9febbb9c8b2e .hgtags
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000
@@ -0,0 +1,1 @@
- +fc281d8ff18d999ad6497b3d27390bcd695dcc73 foo-bar
+ +9feeac35a70aa325519bbf3178683271113f2b8f foo-bar
# HG changeset patch
# User test
# Date 0 0
# Thu Jan 01 00:00:00 1970 +0000
- # Node ID 24c2e826ddebf80f9dcd60b856bdb8e6715c5449
- # Parent fc281d8ff18d999ad6497b3d27390bcd695dcc73
+ # Node ID 3bd4551ec3fe1c0696241f236abe857a53c6d6e7
+ # Parent 9feeac35a70aa325519bbf3178683271113f2b8f
add g
- diff -r fc281d8ff18d -r 24c2e826ddeb g
+ diff -r 9feeac35a70a -r 3bd4551ec3fe g
--- a/g Thu Jan 01 00:00:00 1970 +0000
+++ b/g Thu Jan 01 00:00:00 1970 +0000
@@ -1,2 +1,1 @@
@@ -2286,7 +2286,7 @@
[]
<spanset- 0:9>
$ hg log -G --template '{rev} {desc}\n'
- o 7 Added tag foo-bar for changeset fc281d8ff18d
+ o 7 Added tag foo-bar for changeset 9feeac35a70a
|
o 6 merge 5 and 4
|\
@@ -2384,9 +2384,9 @@
node template with changesetprinter:
$ hg log -Gqr 5:7 --config ui.graphnodetemplate='"{rev}"'
- 7 7:02dbb8e276b8
+ 7 7:9febbb9c8b2e
|
- 6 6:fc281d8ff18d
+ 6 6:9feeac35a70a
|\
| ~
5 5:99b31f1c2782
@@ -2410,7 +2410,7 @@
$ hg log -Gqr 7 --config extensions.color= --color=debug \
> --config ui.graphnodetemplate='{label("branch.{branch}", rev)}'
- [branch.default|7] [log.node|7:02dbb8e276b8]
+ [branch.default|7] [log.node|7:9febbb9c8b2e]
|
~
@@ -3028,12 +3028,14 @@
date: Thu Jan 01 00:00:04 1970 +0000
summary: (4) merge two known; one immediate left, one immediate right
-Draw only part of a grandparent line differently with "<N><char>"; only the
-last N lines (for positive N) or everything but the first N lines (for
-negative N) along the current node use the style, the rest of the edge uses
-the parent edge styling.
+Previously, one could specify graphstyle.grandparent = <N><char> to draw <char>
+on only the last N lines (for positive N) or everything but the first N lines
+(for negative N), with the rest of the edge using the parent edge styling.
-Last 3 lines:
+This was removed, and this test now shows that multiple characters being
+specified in graphstyle.grandparent aren't treated specially (including in width
+calculations; there's no specific reason to *avoid* handling the width
+calculations, but it's difficult to do correctly and efficiently).
$ cat << EOF >> $HGRCPATH
> [experimental]
@@ -3043,77 +3045,77 @@
> EOF
$ hg log -G -r '36:18 & file("a")' -m
@ changeset: 36:08a19a744424
- ! branch: branch
- ! tag: tip
- ! parent: 35:9159c3644c5e
- ! parent: 35:9159c3644c5e
- ! user: test
- . date: Thu Jan 01 00:00:36 1970 +0000
- . summary: (36) buggy merge: identical parents
- .
+ 3. branch: branch
+ 3. tag: tip
+ 3. parent: 35:9159c3644c5e
+ 3. parent: 35:9159c3644c5e
+ 3. user: test
+ 3. date: Thu Jan 01 00:00:36 1970 +0000
+ 3. summary: (36) buggy merge: identical parents
+ 3.
o changeset: 32:d06dffa21a31
!\ parent: 27:886ed638191b
- ! ! parent: 31:621d83e11f67
- ! ! user: test
- ! . date: Thu Jan 01 00:00:32 1970 +0000
- ! . summary: (32) expand
- ! .
- o ! changeset: 31:621d83e11f67
- !\! parent: 21:d42a756af44d
- ! ! parent: 30:6e11cd4b648f
- ! ! user: test
- ! ! date: Thu Jan 01 00:00:31 1970 +0000
- ! ! summary: (31) expand
- ! !
- o ! changeset: 30:6e11cd4b648f
+ ! 3. parent: 31:621d83e11f67
+ ! 3. user: test
+ ! 3. date: Thu Jan 01 00:00:32 1970 +0000
+ ! 3. summary: (32) expand
+ ! 3.
+ o 3. changeset: 31:621d83e11f67
+ !\3. parent: 21:d42a756af44d
+ ! 3. parent: 30:6e11cd4b648f
+ ! 3. user: test
+ ! 3. date: Thu Jan 01 00:00:31 1970 +0000
+ ! 3. summary: (31) expand
+ ! 3.
+ o 3. changeset: 30:6e11cd4b648f
!\ \ parent: 28:44ecd0b9ae99
- ! ~ ! parent: 29:cd9bb2be7593
- ! ! user: test
- ! ! date: Thu Jan 01 00:00:30 1970 +0000
- ! ! summary: (30) expand
+ ! ~ 3. parent: 29:cd9bb2be7593
+ ! 3. user: test
+ ! 3. date: Thu Jan 01 00:00:30 1970 +0000
+ ! 3. summary: (30) expand
! /
- o ! changeset: 28:44ecd0b9ae99
+ o 3. changeset: 28:44ecd0b9ae99
!\ \ parent: 1:6db2ef61d156
- ! ~ ! parent: 26:7f25b6c2f0b9
- ! ! user: test
- ! ! date: Thu Jan 01 00:00:28 1970 +0000
- ! ! summary: (28) merge zero known
+ ! ~ 3. parent: 26:7f25b6c2f0b9
+ ! 3. user: test
+ ! 3. date: Thu Jan 01 00:00:28 1970 +0000
+ ! 3. summary: (28) merge zero known
! /
- o ! changeset: 26:7f25b6c2f0b9
+ o 3. changeset: 26:7f25b6c2f0b9
!\ \ parent: 18:1aa84d96232a
- ! ! ! parent: 25:91da8ed57247
- ! ! ! user: test
- ! ! ! date: Thu Jan 01 00:00:26 1970 +0000
- ! ! ! summary: (26) merge one known; far right
- ! ! !
- ! o ! changeset: 25:91da8ed57247
- ! !\! parent: 21:d42a756af44d
- ! ! ! parent: 24:a9c19a3d96b7
- ! ! ! user: test
- ! ! ! date: Thu Jan 01 00:00:25 1970 +0000
- ! ! ! summary: (25) merge one known; far left
- ! ! !
- ! o ! changeset: 24:a9c19a3d96b7
+ ! ! 3. parent: 25:91da8ed57247
+ ! ! 3. user: test
+ ! ! 3. date: Thu Jan 01 00:00:26 1970 +0000
+ ! ! 3. summary: (26) merge one known; far right
+ ! ! 3.
+ ! o 3. changeset: 25:91da8ed57247
+ ! !\3. parent: 21:d42a756af44d
+ ! ! 3. parent: 24:a9c19a3d96b7
+ ! ! 3. user: test
+ ! ! 3. date: Thu Jan 01 00:00:25 1970 +0000
+ ! ! 3. summary: (25) merge one known; far left
+ ! ! 3.
+ ! o 3. changeset: 24:a9c19a3d96b7
! !\ \ parent: 0:e6eb3150255d
- ! ! ~ ! parent: 23:a01cddf0766d
- ! ! ! user: test
- ! ! ! date: Thu Jan 01 00:00:24 1970 +0000
- ! ! ! summary: (24) merge one known; immediate right
+ ! ! ~ 3. parent: 23:a01cddf0766d
+ ! ! 3. user: test
+ ! ! 3. date: Thu Jan 01 00:00:24 1970 +0000
+ ! ! 3. summary: (24) merge one known; immediate right
! ! /
- ! o ! changeset: 23:a01cddf0766d
+ ! o 3. changeset: 23:a01cddf0766d
! !\ \ parent: 1:6db2ef61d156
- ! ! ~ ! parent: 22:e0d9cccacb5d
- ! ! ! user: test
- ! ! ! date: Thu Jan 01 00:00:23 1970 +0000
- ! ! ! summary: (23) merge one known; immediate left
+ ! ! ~ 3. parent: 22:e0d9cccacb5d
+ ! ! 3. user: test
+ ! ! 3. date: Thu Jan 01 00:00:23 1970 +0000
+ ! ! 3. summary: (23) merge one known; immediate left
! ! /
- ! o ! changeset: 22:e0d9cccacb5d
- !/!/ parent: 18:1aa84d96232a
- ! ! parent: 21:d42a756af44d
- ! ! user: test
- ! ! date: Thu Jan 01 00:00:22 1970 +0000
- ! ! summary: (22) merge two known; one far left, one far right
- ! !
+ ! o 3. changeset: 22:e0d9cccacb5d
+ !/3./ parent: 18:1aa84d96232a
+ ! 3. parent: 21:d42a756af44d
+ ! 3. user: test
+ ! 3. date: Thu Jan 01 00:00:22 1970 +0000
+ ! 3. summary: (22) merge two known; one far left, one far right
+ ! 3.
! o changeset: 21:d42a756af44d
! !\ parent: 19:31ddc2c1573b
! ! ! parent: 20:d30ed6450e32
@@ -3142,7 +3144,8 @@
date: Thu Jan 01 00:00:18 1970 +0000
summary: (18) merge two known; two far left
-All but the first 3 lines:
+(This formerly tested "All but the first 3 lines", but is now showing that it's
+still not treated any differently):
$ cat << EOF >> $HGRCPATH
> [experimental]
@@ -3152,77 +3155,77 @@
> EOF
$ hg log -G -r '36:18 & file("a")' -m
@ changeset: 36:08a19a744424
- ! branch: branch
- ! tag: tip
- . parent: 35:9159c3644c5e
- . parent: 35:9159c3644c5e
- . user: test
- . date: Thu Jan 01 00:00:36 1970 +0000
- . summary: (36) buggy merge: identical parents
- .
+ -3. branch: branch
+ -3. tag: tip
+ -3. parent: 35:9159c3644c5e
+ -3. parent: 35:9159c3644c5e
+ -3. user: test
+ -3. date: Thu Jan 01 00:00:36 1970 +0000
+ -3. summary: (36) buggy merge: identical parents
+ -3.
o changeset: 32:d06dffa21a31
!\ parent: 27:886ed638191b
- ! ! parent: 31:621d83e11f67
- ! . user: test
- ! . date: Thu Jan 01 00:00:32 1970 +0000
- ! . summary: (32) expand
- ! .
- o ! changeset: 31:621d83e11f67
- !\! parent: 21:d42a756af44d
- ! ! parent: 30:6e11cd4b648f
- ! ! user: test
- ! ! date: Thu Jan 01 00:00:31 1970 +0000
- ! ! summary: (31) expand
- ! !
- o ! changeset: 30:6e11cd4b648f
+ ! -3. parent: 31:621d83e11f67
+ ! -3. user: test
+ ! -3. date: Thu Jan 01 00:00:32 1970 +0000
+ ! -3. summary: (32) expand
+ ! -3.
+ o -3. changeset: 31:621d83e11f67
+ !\-3. parent: 21:d42a756af44d
+ ! -3. parent: 30:6e11cd4b648f
+ ! -3. user: test
+ ! -3. date: Thu Jan 01 00:00:31 1970 +0000
+ ! -3. summary: (31) expand
+ ! -3.
+ o -3. changeset: 30:6e11cd4b648f
!\ \ parent: 28:44ecd0b9ae99
- ! ~ ! parent: 29:cd9bb2be7593
- ! ! user: test
- ! ! date: Thu Jan 01 00:00:30 1970 +0000
- ! ! summary: (30) expand
+ ! ~ -3. parent: 29:cd9bb2be7593
+ ! -3. user: test
+ ! -3. date: Thu Jan 01 00:00:30 1970 +0000
+ ! -3. summary: (30) expand
! /
- o ! changeset: 28:44ecd0b9ae99
+ o -3. changeset: 28:44ecd0b9ae99
!\ \ parent: 1:6db2ef61d156
- ! ~ ! parent: 26:7f25b6c2f0b9
- ! ! user: test
- ! ! date: Thu Jan 01 00:00:28 1970 +0000
- ! ! summary: (28) merge zero known
+ ! ~ -3. parent: 26:7f25b6c2f0b9
+ ! -3. user: test
+ ! -3. date: Thu Jan 01 00:00:28 1970 +0000
+ ! -3. summary: (28) merge zero known
! /
- o ! changeset: 26:7f25b6c2f0b9
+ o -3. changeset: 26:7f25b6c2f0b9
!\ \ parent: 18:1aa84d96232a
- ! ! ! parent: 25:91da8ed57247
- ! ! ! user: test
- ! ! ! date: Thu Jan 01 00:00:26 1970 +0000
- ! ! ! summary: (26) merge one known; far right
- ! ! !
- ! o ! changeset: 25:91da8ed57247
- ! !\! parent: 21:d42a756af44d
- ! ! ! parent: 24:a9c19a3d96b7
- ! ! ! user: test
- ! ! ! date: Thu Jan 01 00:00:25 1970 +0000
- ! ! ! summary: (25) merge one known; far left
- ! ! !
- ! o ! changeset: 24:a9c19a3d96b7
+ ! ! -3. parent: 25:91da8ed57247
+ ! ! -3. user: test
+ ! ! -3. date: Thu Jan 01 00:00:26 1970 +0000
+ ! ! -3. summary: (26) merge one known; far right
+ ! ! -3.
+ ! o -3. changeset: 25:91da8ed57247
+ ! !\-3. parent: 21:d42a756af44d
+ ! ! -3. parent: 24:a9c19a3d96b7
+ ! ! -3. user: test
+ ! ! -3. date: Thu Jan 01 00:00:25 1970 +0000
+ ! ! -3. summary: (25) merge one known; far left
+ ! ! -3.
+ ! o -3. changeset: 24:a9c19a3d96b7
! !\ \ parent: 0:e6eb3150255d
- ! ! ~ ! parent: 23:a01cddf0766d
- ! ! ! user: test
- ! ! ! date: Thu Jan 01 00:00:24 1970 +0000
- ! ! ! summary: (24) merge one known; immediate right
+ ! ! ~ -3. parent: 23:a01cddf0766d
+ ! ! -3. user: test
+ ! ! -3. date: Thu Jan 01 00:00:24 1970 +0000
+ ! ! -3. summary: (24) merge one known; immediate right
! ! /
- ! o ! changeset: 23:a01cddf0766d
+ ! o -3. changeset: 23:a01cddf0766d
! !\ \ parent: 1:6db2ef61d156
- ! ! ~ ! parent: 22:e0d9cccacb5d
- ! ! ! user: test
- ! ! ! date: Thu Jan 01 00:00:23 1970 +0000
- ! ! ! summary: (23) merge one known; immediate left
+ ! ! ~ -3. parent: 22:e0d9cccacb5d
+ ! ! -3. user: test
+ ! ! -3. date: Thu Jan 01 00:00:23 1970 +0000
+ ! ! -3. summary: (23) merge one known; immediate left
! ! /
- ! o ! changeset: 22:e0d9cccacb5d
- !/!/ parent: 18:1aa84d96232a
- ! ! parent: 21:d42a756af44d
- ! ! user: test
- ! ! date: Thu Jan 01 00:00:22 1970 +0000
- ! ! summary: (22) merge two known; one far left, one far right
- ! !
+ ! o -3. changeset: 22:e0d9cccacb5d
+ !/-3./ parent: 18:1aa84d96232a
+ ! -3. parent: 21:d42a756af44d
+ ! -3. user: test
+ ! -3. date: Thu Jan 01 00:00:22 1970 +0000
+ ! -3. summary: (22) merge two known; one far left, one far right
+ ! -3.
! o changeset: 21:d42a756af44d
! !\ parent: 19:31ddc2c1573b
! ! ! parent: 20:d30ed6450e32
--- a/tests/test-graft.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-graft.t Mon Jul 22 14:00:33 2019 -0400
@@ -1,9 +1,18 @@
+#testcases abortcommand abortflag
+
$ cat >> $HGRCPATH <<EOF
> [extdiff]
> # for portability:
> pdiff = sh "$RUNTESTDIR/pdiff"
> EOF
+#if abortflag
+ $ cat >> $HGRCPATH <<EOF
+ > [alias]
+ > abort = graft --abort
+ > EOF
+#endif
+
Create a repo with some stuff in it:
$ hg init a
@@ -75,6 +84,8 @@
$ hg graft -r 2 --base 3
grafting 2:5c095ad7e90f "2"
+ note: possible conflict - c was deleted and renamed to:
+ a
note: graft of 2:5c095ad7e90f created no changes to commit
Can't continue without starting:
@@ -199,7 +210,6 @@
scanning for duplicate grafts
skipping revision 2:5c095ad7e90f (already grafted to 7:ef0ef43d49e7)
grafting 1:5d205f8b35b6 "1"
- searching for copies back to rev 1
unmatched files in local:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -221,9 +231,9 @@
committing changelog
updating the branch cache
grafting 5:97f8bfe72746 "5"
- searching for copies back to rev 1
- unmatched files in other (from topological common ancestor):
- c
+ all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+ src: 'c' -> dst: 'b'
+ checking for directory renames
resolving manifests
branchmerge: True, force: True, partial: False
ancestor: 4c60f11aa304, local: 6b9e5368ca4e+, remote: 97f8bfe72746
@@ -237,9 +247,9 @@
$ HGEDITOR=cat hg graft 4 3 --log --debug
scanning for duplicate grafts
grafting 4:9c233e8e184d "4"
- searching for copies back to rev 1
- unmatched files in other (from topological common ancestor):
- c
+ all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+ src: 'c' -> dst: 'b'
+ checking for directory renames
resolving manifests
branchmerge: True, force: True, partial: False
ancestor: 4c60f11aa304, local: 1905859650ec+, remote: 9c233e8e184d
@@ -280,6 +290,7 @@
# To continue: hg graft --continue
# To abort: hg graft --abort
+ # To stop: hg graft --stop
Commit while interrupted should fail:
@@ -744,11 +755,9 @@
$ hg graft -q 13 --debug
scanning for duplicate grafts
grafting 13:7a4785234d87 "2"
- searching for copies back to rev 12
- unmatched files in other (from topological common ancestor):
- g
- unmatched files new in both:
- b
+ all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+ src: 'a' -> dst: 'b' *
+ checking for directory renames
resolving manifests
branchmerge: True, force: True, partial: False
ancestor: b592ea63bb0c, local: 7e61b508e709+, remote: 7a4785234d87
@@ -756,7 +765,7 @@
committing files:
b
warning: can't find ancestor for 'b' copied from 'a'!
- reusing manifest form p1 (listed files actually unchanged)
+ reusing manifest from p1 (listed files actually unchanged)
committing changelog
updating the branch cache
$ hg log -r 'destination(13)'
@@ -969,7 +978,6 @@
A.4 has a degenerate case a<-b<-a->a, where checkcopies isn't needed at all.
A.5 has a special case a<-b<-b->a, which is treated like a<-b->a in a merge.
A.5 has issue5343 as a special case.
-TODO: add test coverage for A.5
A.6 has a special case a<-a<-b->a. Here, checkcopies will find a spurious
incomplete divergence, which is in fact complete. This is handled later in
mergecopies.
@@ -1072,13 +1080,32 @@
$ hg mv f4a f4e
$ hg mv f5a f5b
$ hg ci -qAm "E0"
+ $ hg up -q "min(desc("A0"))"
+ $ hg cp f1a f1f
+ $ hg ci -qAm "F0"
+ $ hg up -q "min(desc("A0"))"
+ $ hg cp f1a f1g
+ $ echo c1g > f1g
+ $ hg ci -qAm "G0"
$ hg log -G
- @ changeset: 6:6bd1736cab86
+ @ changeset: 8:ba67f08fb15a
| tag: tip
| parent: 0:11f7a1b56675
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
- | summary: E0
+ | summary: G0
+ |
+ | o changeset: 7:d376ab0d7fda
+ |/ parent: 0:11f7a1b56675
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: F0
+ |
+ | o changeset: 6:6bd1736cab86
+ |/ parent: 0:11f7a1b56675
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: E0
|
| o changeset: 5:560daee679da
| | user: test
@@ -1115,11 +1142,11 @@
Test the cases A.4 (f1x), the "ping-pong" special case of A.7 (f5x),
and A.3 with a local content change to be preserved (f2x).
+ $ hg up -q "desc("E0")"
$ HGEDITOR="echo C2 >" hg graft -r 'desc("C0")' --edit
grafting 2:f58c7e2b28fa "C0"
merging f1e and f1b to f1e
merging f2a and f2c to f2c
- merging f5b and f5a to f5a
Test the cases A.1 (f4x) and A.7 (f3x).
@@ -1131,93 +1158,129 @@
merging f4e and f4a to f4e
warning: can't find ancestor for 'f3d' copied from 'f3b'!
+ $ hg cat f2c
+ c2e
+
+Test the case A.5 (move case, f1x).
+
+ $ hg up -q "desc("C0")"
+BROKEN: Shouldn't get the warning about missing ancestor
+ $ HGEDITOR="echo E1 >" hg graft -r 'desc("E0")' --edit
+ grafting 6:6bd1736cab86 "E0"
+ note: possible conflict - f1a was renamed multiple times to:
+ f1b
+ f1e
+ note: possible conflict - f3a was renamed multiple times to:
+ f3b
+ f3e
+ merging f2c and f2a to f2c
+ merging f5a and f5b to f5b
+ warning: can't find ancestor for 'f1e' copied from 'f1a'!
+ warning: can't find ancestor for 'f3e' copied from 'f3a'!
+ $ cat f1e
+ c1a
+
+Test the case A.5 (copy case, f1x).
+
+ $ hg up -q "desc("C0")"
+BROKEN: Shouldn't get the warning about missing ancestor
+ $ HGEDITOR="echo F1 >" hg graft -r 'desc("F0")' --edit
+ grafting 7:d376ab0d7fda "F0"
+ warning: can't find ancestor for 'f1f' copied from 'f1a'!
+BROKEN: f1f should be marked a copy from f1b
+ $ hg st --copies --change .
+ A f1f
+BROKEN: f1f should have the new content from f1b (i.e. "c1c")
+ $ cat f1f
+ c1a
+
+Test the case A.5 (copy+modify case, f1x).
+
+ $ hg up -q "desc("C0")"
+BROKEN: We should get a merge conflict from the 3-way merge between f1b in C0
+(content "c1c") and f1g in G0 (content "c1g") with f1a in A0 as base (content
+"c1a")
+ $ HGEDITOR="echo G1 >" hg graft -r 'desc("G0")' --edit
+ grafting 8:ba67f08fb15a "G0"
+ warning: can't find ancestor for 'f1g' copied from 'f1a'!
+
Check the results of the grafts tested
$ hg log -CGv --patch --git
- @ changeset: 8:93ee502e8b0a
+ @ changeset: 13:ef3adf6c20a4
| tag: tip
+ | parent: 2:f58c7e2b28fa
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
- | files: f3d f4e
+ | files: f1g
| description:
- | D2
+ | G1
|
|
- | diff --git a/f3d b/f3d
+ | diff --git a/f1g b/f1g
| new file mode 100644
| --- /dev/null
- | +++ b/f3d
+ | +++ b/f1g
| @@ -0,0 +1,1 @@
- | +c3a
- | diff --git a/f4e b/f4e
- | --- a/f4e
- | +++ b/f4e
- | @@ -1,1 +1,1 @@
- | -c4a
- | +c4d
+ | +c1g
|
- o changeset: 7:539cf145f496
- | user: test
- | date: Thu Jan 01 00:00:00 1970 +0000
- | files: f1e f2a f2c f5a f5b
- | copies: f2c (f2a) f5a (f5b)
- | description:
- | C2
+ | o changeset: 12:b5542d755b54
+ |/ parent: 2:f58c7e2b28fa
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | files: f1f
+ | description:
+ | F1
|
|
- | diff --git a/f1e b/f1e
- | --- a/f1e
- | +++ b/f1e
- | @@ -1,1 +1,1 @@
- | -c1a
- | +c1c
- | diff --git a/f2a b/f2c
- | rename from f2a
- | rename to f2c
- | diff --git a/f5b b/f5a
- | rename from f5b
- | rename to f5a
- | --- a/f5b
- | +++ b/f5a
- | @@ -1,1 +1,1 @@
- | -c5a
- | +c5c
+ | diff --git a/f1f b/f1f
+ | new file mode 100644
+ | --- /dev/null
+ | +++ b/f1f
+ | @@ -0,0 +1,1 @@
+ | +c1a
|
- o changeset: 6:6bd1736cab86
- | parent: 0:11f7a1b56675
- | user: test
- | date: Thu Jan 01 00:00:00 1970 +0000
- | files: f1a f1e f2a f3a f3e f4a f4e f5a f5b
- | copies: f1e (f1a) f3e (f3a) f4e (f4a) f5b (f5a)
- | description:
- | E0
+ | o changeset: 11:f8a162271246
+ |/ parent: 2:f58c7e2b28fa
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | files: f1e f2c f3e f4a f4e f5a f5b
+ | copies: f4e (f4a) f5b (f5a)
+ | description:
+ | E1
|
|
- | diff --git a/f1a b/f1e
- | rename from f1a
- | rename to f1e
- | diff --git a/f2a b/f2a
- | --- a/f2a
- | +++ b/f2a
- | @@ -1,1 +1,1 @@
- | -c2a
- | +c2e
- | diff --git a/f3a b/f3e
- | rename from f3a
- | rename to f3e
- | diff --git a/f4a b/f4e
- | rename from f4a
- | rename to f4e
- | diff --git a/f5a b/f5b
- | rename from f5a
- | rename to f5b
+ | diff --git a/f1e b/f1e
+ | new file mode 100644
+ | --- /dev/null
+ | +++ b/f1e
+ | @@ -0,0 +1,1 @@
+ | +c1a
+ | diff --git a/f2c b/f2c
+ | --- a/f2c
+ | +++ b/f2c
+ | @@ -1,1 +1,1 @@
+ | -c2a
+ | +c2e
+ | diff --git a/f3e b/f3e
+ | new file mode 100644
+ | --- /dev/null
+ | +++ b/f3e
+ | @@ -0,0 +1,1 @@
+ | +c3a
+ | diff --git a/f4a b/f4e
+ | rename from f4a
+ | rename to f4e
+ | diff --git a/f5a b/f5b
+ | rename from f5a
+ | rename to f5b
|
- | o changeset: 5:560daee679da
+ | o changeset: 10:93ee502e8b0a
| | user: test
| | date: Thu Jan 01 00:00:00 1970 +0000
- | | files: f3d f4a
+ | | files: f3d f4e
| | description:
- | | D1
+ | | D2
| |
| |
| | diff --git a/f3d b/f3d
@@ -1226,59 +1289,170 @@
| | +++ b/f3d
| | @@ -0,0 +1,1 @@
| | +c3a
- | | diff --git a/f4a b/f4a
- | | --- a/f4a
- | | +++ b/f4a
+ | | diff --git a/f4e b/f4e
+ | | --- a/f4e
+ | | +++ b/f4e
| | @@ -1,1 +1,1 @@
| | -c4a
| | +c4d
| |
- | o changeset: 4:c9763722f9bd
- |/ parent: 0:11f7a1b56675
- | user: test
- | date: Thu Jan 01 00:00:00 1970 +0000
- | files: f1a f2a f2c f5a
- | copies: f2c (f2a)
- | description:
- | C1
- |
- |
- | diff --git a/f1a b/f1a
- | --- a/f1a
- | +++ b/f1a
- | @@ -1,1 +1,1 @@
- | -c1a
- | +c1c
- | diff --git a/f2a b/f2c
- | rename from f2a
- | rename to f2c
- | diff --git a/f5a b/f5a
- | --- a/f5a
- | +++ b/f5a
- | @@ -1,1 +1,1 @@
- | -c5a
- | +c5c
- |
- | o changeset: 3:b69f5839d2d9
+ | o changeset: 9:539cf145f496
+ | | parent: 6:6bd1736cab86
| | user: test
| | date: Thu Jan 01 00:00:00 1970 +0000
- | | files: f3b f3d f4a
- | | copies: f3d (f3b)
+ | | files: f1e f2a f2c f5a f5b
+ | | copies: f2c (f2a) f5a (f5b)
| | description:
- | | D0
+ | | C2
+ | |
+ | |
+ | | diff --git a/f1e b/f1e
+ | | --- a/f1e
+ | | +++ b/f1e
+ | | @@ -1,1 +1,1 @@
+ | | -c1a
+ | | +c1c
+ | | diff --git a/f2a b/f2c
+ | | rename from f2a
+ | | rename to f2c
+ | | diff --git a/f5b b/f5a
+ | | rename from f5b
+ | | rename to f5a
+ | | --- a/f5b
+ | | +++ b/f5a
+ | | @@ -1,1 +1,1 @@
+ | | -c5a
+ | | +c5c
+ | |
+ | | o changeset: 8:ba67f08fb15a
+ | | | parent: 0:11f7a1b56675
+ | | | user: test
+ | | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | | files: f1g
+ | | | copies: f1g (f1a)
+ | | | description:
+ | | | G0
+ | | |
+ | | |
+ | | | diff --git a/f1a b/f1g
+ | | | copy from f1a
+ | | | copy to f1g
+ | | | --- a/f1a
+ | | | +++ b/f1g
+ | | | @@ -1,1 +1,1 @@
+ | | | -c1a
+ | | | +c1g
+ | | |
+ | | | o changeset: 7:d376ab0d7fda
+ | | |/ parent: 0:11f7a1b56675
+ | | | user: test
+ | | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | | files: f1f
+ | | | copies: f1f (f1a)
+ | | | description:
+ | | | F0
+ | | |
+ | | |
+ | | | diff --git a/f1a b/f1f
+ | | | copy from f1a
+ | | | copy to f1f
+ | | |
+ | o | changeset: 6:6bd1736cab86
+ | |/ parent: 0:11f7a1b56675
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | files: f1a f1e f2a f3a f3e f4a f4e f5a f5b
+ | | copies: f1e (f1a) f3e (f3a) f4e (f4a) f5b (f5a)
+ | | description:
+ | | E0
| |
| |
- | | diff --git a/f3b b/f3d
- | | rename from f3b
- | | rename to f3d
- | | diff --git a/f4a b/f4a
- | | --- a/f4a
- | | +++ b/f4a
- | | @@ -1,1 +1,1 @@
- | | -c4a
- | | +c4d
+ | | diff --git a/f1a b/f1e
+ | | rename from f1a
+ | | rename to f1e
+ | | diff --git a/f2a b/f2a
+ | | --- a/f2a
+ | | +++ b/f2a
+ | | @@ -1,1 +1,1 @@
+ | | -c2a
+ | | +c2e
+ | | diff --git a/f3a b/f3e
+ | | rename from f3a
+ | | rename to f3e
+ | | diff --git a/f4a b/f4e
+ | | rename from f4a
+ | | rename to f4e
+ | | diff --git a/f5a b/f5b
+ | | rename from f5a
+ | | rename to f5b
| |
- | o changeset: 2:f58c7e2b28fa
+ | | o changeset: 5:560daee679da
+ | | | user: test
+ | | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | | files: f3d f4a
+ | | | description:
+ | | | D1
+ | | |
+ | | |
+ | | | diff --git a/f3d b/f3d
+ | | | new file mode 100644
+ | | | --- /dev/null
+ | | | +++ b/f3d
+ | | | @@ -0,0 +1,1 @@
+ | | | +c3a
+ | | | diff --git a/f4a b/f4a
+ | | | --- a/f4a
+ | | | +++ b/f4a
+ | | | @@ -1,1 +1,1 @@
+ | | | -c4a
+ | | | +c4d
+ | | |
+ | | o changeset: 4:c9763722f9bd
+ | |/ parent: 0:11f7a1b56675
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | files: f1a f2a f2c f5a
+ | | copies: f2c (f2a)
+ | | description:
+ | | C1
+ | |
+ | |
+ | | diff --git a/f1a b/f1a
+ | | --- a/f1a
+ | | +++ b/f1a
+ | | @@ -1,1 +1,1 @@
+ | | -c1a
+ | | +c1c
+ | | diff --git a/f2a b/f2c
+ | | rename from f2a
+ | | rename to f2c
+ | | diff --git a/f5a b/f5a
+ | | --- a/f5a
+ | | +++ b/f5a
+ | | @@ -1,1 +1,1 @@
+ | | -c5a
+ | | +c5c
+ | |
+ +---o changeset: 3:b69f5839d2d9
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | files: f3b f3d f4a
+ | | copies: f3d (f3b)
+ | | description:
+ | | D0
+ | |
+ | |
+ | | diff --git a/f3b b/f3d
+ | | rename from f3b
+ | | rename to f3d
+ | | diff --git a/f4a b/f4a
+ | | --- a/f4a
+ | | +++ b/f4a
+ | | @@ -1,1 +1,1 @@
+ | | -c4a
+ | | +c4d
+ | |
+ o | changeset: 2:f58c7e2b28fa
| | user: test
| | date: Thu Jan 01 00:00:00 1970 +0000
| | files: f1b f2a f2c f5a f5b
@@ -1305,7 +1479,7 @@
| | -c5a
| | +c5c
| |
- | o changeset: 1:3d7bba921b5d
+ o | changeset: 1:3d7bba921b5d
|/ user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| files: f1a f1b f3a f3b f5a f5b
@@ -1363,9 +1537,6 @@
@@ -0,0 +1,1 @@
+c5a
- $ hg cat f2c
- c2e
-
Check superfluous filemerge of files renamed in the past but untouched by graft
$ echo a > a
@@ -1824,8 +1995,9 @@
$ hg up 9150fe93bec6
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ hg graft --abort
- abort: no interrupted graft to abort
+ $ hg abort
+ abort: no interrupted graft to abort (abortflag !)
+ abort: no operation in progress (abortcommand !)
[255]
when stripping is required
@@ -1854,7 +2026,13 @@
abort: cannot specify any other flag with '--abort'
[255]
- $ hg graft --abort
+#if abortcommand
+when in dry-run mode
+ $ hg abort --dry-run
+ graft in progress, will be aborted
+#endif
+
+ $ hg abort
graft aborted
working directory is now at 9150fe93bec6
$ hg log -GT "{rev}:{node|short} {desc}"
@@ -1879,7 +2057,7 @@
(use 'hg resolve' and 'hg graft --continue')
[255]
- $ hg graft --abort
+ $ hg abort
graft aborted
working directory is now at 9150fe93bec6
$ hg log -GT "{rev}:{node|short} {desc}"
@@ -1923,7 +2101,7 @@
$ hg phase -r 6 --public
- $ hg graft --abort
+ $ hg abort
cannot clean up public changesets 6ec71c037d94
graft aborted
working directory is now at 6ec71c037d94
@@ -2000,7 +2178,7 @@
new changesets 311dfc6cf3bf (1 drafts)
(run 'hg heads .' to see heads, 'hg merge' to merge)
- $ hg graft --abort
+ $ hg abort
new changesets detected on destination branch, can't strip
graft aborted
working directory is now at 6b98ff0062dd
--- a/tests/test-help-hide.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-help-hide.t Mon Jul 22 14:00:33 2019 -0400
@@ -60,8 +60,10 @@
resolve redo merges or set/view the merge status of files
revert restore files to their checkout state
root print the root (top) of the current working directory
+ shelve save and set aside changes from the working directory
status show changed files in the working directory
summary summarize working directory state
+ unshelve restore a shelved change to the working directory
update update working directory (or switch revisions)
Change import/export:
@@ -194,8 +196,10 @@
resolve redo merges or set/view the merge status of files
revert restore files to their checkout state
root print the root (top) of the current working directory
+ shelve save and set aside changes from the working directory
status show changed files in the working directory
summary summarize working directory state
+ unshelve restore a shelved change to the working directory
update update working directory (or switch revisions)
Change import/export:
--- a/tests/test-help.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-help.t Mon Jul 22 14:00:33 2019 -0400
@@ -112,8 +112,10 @@
resolve redo merges or set/view the merge status of files
revert restore files to their checkout state
root print the root (top) of the current working directory
+ shelve save and set aside changes from the working directory
status show changed files in the working directory
summary summarize working directory state
+ unshelve restore a shelved change to the working directory
update update working directory (or switch revisions)
Change import/export:
@@ -238,8 +240,10 @@
resolve redo merges or set/view the merge status of files
revert restore files to their checkout state
root print the root (top) of the current working directory
+ shelve save and set aside changes from the working directory
status show changed files in the working directory
summary summarize working directory state
+ unshelve restore a shelved change to the working directory
update update working directory (or switch revisions)
Change import/export:
@@ -375,7 +379,6 @@
relink recreates hardlinks between repository clones
schemes extend schemes with shortcuts to repository swarms
share share a common history between several working directories
- shelve save and restore changes to the working directory
strip strip changesets and their descendants from history
transplant command to transplant changesets from another branch
win32mbcs allow the use of MBCS paths with problematic encodings
@@ -399,11 +402,13 @@
basic commands:
+ abort abort an unfinished operation (EXPERIMENTAL)
add add the specified files on the next commit
annotate, blame
show changeset information by line for each file
clone make a copy of an existing repository
commit, ci commit the specified files or all outstanding changes
+ continue resumes an interrupted operation (EXPERIMENTAL)
diff diff repository (or selected files)
export dump the header and diffs for one or more changesets
forget forget the specified files on the next commit
@@ -557,6 +562,10 @@
Returns 0 on success.
+ options:
+
+ -T --template TEMPLATE display with template
+
(some details hidden, use --verbose to show
complete help)
@@ -615,6 +624,8 @@
Returns 0 on success, 1 if errors are encountered.
+ options:
+
(some details hidden, use --verbose to show complete help)
$ hg help diff
@@ -1076,6 +1087,7 @@
changegroups Changegroups
config Config Registrar
extensions Extension API
+ mergestate Mergestate
requirements Repository Requirements
revlogs Revision Logs
wireprotocol Wire Protocol
@@ -1301,6 +1313,13 @@
*empty chunk* at the end of each *delta group* denotes the boundary to the
next filelog sub-segment.
+non-existent subtopics print an error
+
+ $ hg help internals.foo
+ abort: no such help topic: internals.foo
+ (try 'hg help --keyword foo')
+ [255]
+
test advanced, deprecated and experimental options are hidden in command help
$ hg help debugoptADV
hg debugoptADV
@@ -1511,6 +1530,8 @@
"revlog-compression"
+ "bookmarks-in-store"
+
"profiling"
-----------
@@ -2334,6 +2355,13 @@
<tr><td colspan="2"><h2><a name="main" href="#main">Main Commands</a></h2></td></tr>
<tr><td>
+ <a href="/help/abort">
+ abort
+ </a>
+ </td><td>
+ abort an unfinished operation (EXPERIMENTAL)
+ </td></tr>
+ <tr><td>
<a href="/help/add">
add
</a>
@@ -2362,6 +2390,13 @@
commit the specified files or all outstanding changes
</td></tr>
<tr><td>
+ <a href="/help/continue">
+ continue
+ </a>
+ </td><td>
+ resumes an interrupted operation (EXPERIMENTAL)
+ </td></tr>
+ <tr><td>
<a href="/help/diff">
diff
</a>
@@ -2689,6 +2724,13 @@
(no help text available)
</td></tr>
<tr><td>
+ <a href="/help/shelve">
+ shelve
+ </a>
+ </td><td>
+ save and set aside changes from the working directory
+ </td></tr>
+ <tr><td>
<a href="/help/tag">
tag
</a>
@@ -2710,6 +2752,13 @@
apply one or more bundle files
</td></tr>
<tr><td>
+ <a href="/help/unshelve">
+ unshelve
+ </a>
+ </td><td>
+ restore a shelved change to the working directory
+ </td></tr>
+ <tr><td>
<a href="/help/verify">
verify
</a>
@@ -3425,6 +3474,13 @@
Extension API
</td></tr>
<tr><td>
+ <a href="/help/internals.mergestate">
+ mergestate
+ </a>
+ </td><td>
+ Mergestate
+ </td></tr>
+ <tr><td>
<a href="/help/internals.requirements">
requirements
</a>
--- a/tests/test-hgignore.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-hgignore.t Mon Jul 22 14:00:33 2019 -0400
@@ -90,25 +90,50 @@
Ensure that comments work:
- $ touch 'foo#bar' 'quux#'
+ $ touch 'foo#bar' 'quux#' 'quu0#'
#if no-windows
- $ touch 'baz\#wat'
+ $ touch 'baz\' 'baz\wat' 'ba0\#wat' 'ba1\\' 'ba1\\wat' 'quu0\'
#endif
+
$ cat <<'EOF' >> .hgignore
> # full-line comment
> # whitespace-only comment line
> syntax# pattern, no whitespace, then comment
> a.c # pattern, then whitespace, then comment
- > baz\\# # escaped comment character
+ > baz\\# # (escaped) backslash, then comment
+ > ba0\\\#w # (escaped) backslash, escaped comment character, then comment
+ > ba1\\\\# # (escaped) backslashes, then comment
> foo\#b # escaped comment character
> quux\## escaped comment character at end of name
> EOF
$ hg status
A dir/b.o
? .hgignore
- $ rm 'foo#bar' 'quux#'
+ ? quu0#
+ ? quu0\ (no-windows !)
+
+ $ cat <<'EOF' > .hgignore
+ > .*\.o
+ > syntax: glob
+ > syntax# pattern, no whitespace, then comment
+ > a.c # pattern, then whitespace, then comment
+ > baz\\#* # (escaped) backslash, then comment
+ > ba0\\\#w* # (escaped) backslash, escaped comment character, then comment
+ > ba1\\\\#* # (escaped) backslashes, then comment
+ > foo\#b* # escaped comment character
+ > quux\## escaped comment character at end of name
+ > quu0[\#]# escaped comment character inside [...]
+ > EOF
+ $ hg status
+ A dir/b.o
+ ? .hgignore
+ ? ba1\\wat (no-windows !)
+ ? baz\wat (no-windows !)
+ ? quu0\ (no-windows !)
+
+ $ rm 'foo#bar' 'quux#' 'quu0#'
#if no-windows
- $ rm 'baz\#wat'
+ $ rm 'baz\' 'baz\wat' 'ba0\#wat' 'ba1\\' 'ba1\\wat' 'quu0\'
#endif
Check that '^\.' does not ignore the root directory:
--- a/tests/test-hgweb-json.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-hgweb-json.t Mon Jul 22 14:00:33 2019 -0400
@@ -1875,6 +1875,10 @@
{
"earlycommands": [
{
+ "summary": "abort an unfinished operation (EXPERIMENTAL)",
+ "topic": "abort"
+ },
+ {
"summary": "add the specified files on the next commit",
"topic": "add"
},
@@ -1891,6 +1895,10 @@
"topic": "commit"
},
{
+ "summary": "resumes an interrupted operation (EXPERIMENTAL)",
+ "topic": "continue"
+ },
+ {
"summary": "diff repository (or selected files)",
"topic": "diff"
},
@@ -2057,6 +2065,10 @@
"topic": "root"
},
{
+ "summary": "save and set aside changes from the working directory",
+ "topic": "shelve"
+ },
+ {
"summary": "add one or more tags for the current or given revision",
"topic": "tag"
},
@@ -2069,6 +2081,10 @@
"topic": "unbundle"
},
{
+ "summary": "restore a shelved change to the working directory",
+ "topic": "unshelve"
+ },
+ {
"summary": "verify the integrity of the repository",
"topic": "verify"
},
--- a/tests/test-histedit-arguments.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-histedit-arguments.t Mon Jul 22 14:00:33 2019 -0400
@@ -1,3 +1,12 @@
+#testcases abortcommand abortflag
+
+#if abortflag
+ $ cat >> $HGRCPATH <<EOF
+ > [alias]
+ > abort = histedit --abort
+ > EOF
+#endif
+
Test argument handling and various data parsing
==================================================
@@ -47,8 +56,9 @@
$ hg histedit --continue
abort: no histedit in progress
[255]
- $ hg histedit --abort
- abort: no histedit in progress
+ $ hg abort
+ abort: no histedit in progress (abortflag !)
+ abort: no operation in progress (abortcommand !)
[255]
Run a dummy edit to make sure we get tip^^ correctly via revsingle.
@@ -358,7 +368,7 @@
Corrupt histedit state file
$ sed 's/8fda0c726bf2/123456789012/' .hg/histedit-state > ../corrupt-histedit
$ mv ../corrupt-histedit .hg/histedit-state
- $ hg histedit --abort
+ $ hg abort
warning: encountered an exception during histedit --abort; the repository may not have been completely cleaned up
abort: $TESTTMP/foo/.hg/strip-backup/*-histedit.hg: $ENOENT$ (glob) (windows !)
abort: $ENOENT$: '$TESTTMP/foo/.hg/strip-backup/*-histedit.hg' (glob) (no-windows !)
--- a/tests/test-histedit-no-backup.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-histedit-no-backup.t Mon Jul 22 14:00:33 2019 -0400
@@ -1,3 +1,12 @@
+#testcases abortcommand abortflag
+
+#if abortflag
+ $ cat >> $HGRCPATH <<EOF
+ > [alias]
+ > abort = histedit --abort
+ > EOF
+#endif
+
$ . "$TESTDIR/histedit-helpers.sh"
Enable extension used by this test
@@ -44,7 +53,7 @@
Editing (7d5187087c79), you may commit or record as needed now.
(hg histedit --continue to resume)
[1]
- $ hg histedit --abort
+ $ hg abort
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
saved backup bundle to $TESTTMP/foo/.hg/strip-backup/1d8f701c7b35-cf7be322-backup.hg
saved backup bundle to $TESTTMP/foo/.hg/strip-backup/5c0056670bce-b54b65d0-backup.hg
@@ -66,5 +75,6 @@
Editing (7d5187087c79), you may commit or record as needed now.
(hg histedit --continue to resume)
[1]
- $ hg histedit --abort
+
+ $ hg abort
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-histedit-obsolete.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-histedit-obsolete.t Mon Jul 22 14:00:33 2019 -0400
@@ -1,3 +1,12 @@
+#testcases abortcommand abortflag
+
+#if abortflag
+ $ cat >> $HGRCPATH <<EOF
+ > [alias]
+ > abort = histedit --abort
+ > EOF
+#endif
+
$ . "$TESTDIR/histedit-helpers.sh"
Enable obsolete
@@ -522,7 +531,13 @@
(hg histedit --continue to resume)
[1]
- $ hg histedit --abort
+#if abortcommand
+when in dry-run mode
+ $ hg abort --dry-run
+ histedit in progress, will be aborted
+#endif
+
+ $ hg abort
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
saved backup bundle to $TESTTMP/abort/.hg/strip-backup/4dc06258baa6-dff4ef05-backup.hg
--- a/tests/test-https.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-https.t Mon Jul 22 14:00:33 2019 -0400
@@ -488,26 +488,26 @@
(could not negotiate a common security protocol (tls1.1+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
(consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
(see https://mercurial-scm.org/wiki/SecureConnections for more info)
- abort: error: *unsupported protocol* (glob)
+ abort: error: .*(unsupported protocol|wrong ssl version).* (re)
[255]
$ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.1 id https://localhost:$HGPORT/
(could not negotiate a common security protocol (tls1.1+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
(consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
(see https://mercurial-scm.org/wiki/SecureConnections for more info)
- abort: error: *unsupported protocol* (glob)
+ abort: error: .*(unsupported protocol|wrong ssl version).* (re)
[255]
$ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.2 id https://localhost:$HGPORT/
(could not negotiate a common security protocol (tls1.2+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
(consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
(see https://mercurial-scm.org/wiki/SecureConnections for more info)
- abort: error: *unsupported protocol* (glob)
+ abort: error: .*(unsupported protocol|wrong ssl version).* (re)
[255]
$ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.2 id https://localhost:$HGPORT1/
(could not negotiate a common security protocol (tls1.2+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
(consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
(see https://mercurial-scm.org/wiki/SecureConnections for more info)
- abort: error: *unsupported protocol* (glob)
+ abort: error: .*(unsupported protocol|wrong ssl version).* (re)
[255]
--insecure will allow TLS 1.0 connections and override configs
@@ -530,7 +530,7 @@
(could not negotiate a common security protocol (tls1.2+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
(consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
(see https://mercurial-scm.org/wiki/SecureConnections for more info)
- abort: error: *unsupported protocol* (glob)
+ abort: error: .*(unsupported protocol|wrong ssl version).* (re)
[255]
.hg/hgrc file [hostsecurity] settings are applied to remote ui instances (issue5305)
@@ -543,7 +543,7 @@
(could not negotiate a common security protocol (tls1.2+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
(consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
(see https://mercurial-scm.org/wiki/SecureConnections for more info)
- abort: error: *unsupported protocol* (glob)
+ abort: error: .*(unsupported protocol|wrong ssl version).* (re)
[255]
$ killdaemons.py hg0.pid
--- a/tests/test-issue1802.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-issue1802.t Mon Jul 22 14:00:33 2019 -0400
@@ -52,7 +52,6 @@
Simulate a Windows merge:
$ hg --config extensions.n=$TESTTMP/noexec.py merge --debug
- searching for copies back to rev 1
unmatched files in local:
b
resolving manifests
--- a/tests/test-issue522.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-issue522.t Mon Jul 22 14:00:33 2019 -0400
@@ -25,7 +25,6 @@
$ hg ci -qAm 'add bar'
$ hg merge --debug
- searching for copies back to rev 1
unmatched files in local:
bar
resolving manifests
--- a/tests/test-issue672.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-issue672.t Mon Jul 22 14:00:33 2019 -0400
@@ -25,7 +25,6 @@
created new head
$ hg merge --debug 1
- searching for copies back to rev 1
unmatched files in other:
1a
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -54,7 +53,6 @@
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg merge -y --debug 4
- searching for copies back to rev 1
unmatched files in local:
1a
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -62,13 +60,13 @@
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
- ancestor: c64f439569a9, local: e327dca35ac8+, remote: 746e9549ea96
+ ancestor: c64f439569a9, local: f4a9cff3cd0b+, remote: 746e9549ea96
preserving 1a for resolve of 1a
starting 4 threads for background file closing (?)
1a: local copied/moved from 1 -> m (premerge)
picked tool ':merge' for 1a (binary False symlink False changedelete False)
merging 1a and 1 to 1a
- my 1a@e327dca35ac8+ other 1@746e9549ea96 ancestor 1@c64f439569a9
+ my 1a@f4a9cff3cd0b+ other 1@746e9549ea96 ancestor 1@c64f439569a9
premerge successful
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -77,7 +75,6 @@
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg merge -y --debug 3
- searching for copies back to rev 1
unmatched files in other:
1a
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -85,14 +82,14 @@
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
- ancestor: c64f439569a9, local: 746e9549ea96+, remote: e327dca35ac8
+ ancestor: c64f439569a9, local: 746e9549ea96+, remote: f4a9cff3cd0b
preserving 1 for resolve of 1a
removing 1
starting 4 threads for background file closing (?)
1a: remote moved from 1 -> m (premerge)
picked tool ':merge' for 1a (binary False symlink False changedelete False)
merging 1 and 1a to 1a
- my 1a@746e9549ea96+ other 1a@e327dca35ac8 ancestor 1@c64f439569a9
+ my 1a@746e9549ea96+ other 1a@f4a9cff3cd0b ancestor 1@c64f439569a9
premerge successful
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
--- a/tests/test-keyword.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-keyword.t Mon Jul 22 14:00:33 2019 -0400
@@ -392,13 +392,15 @@
+foo
do not process $Id:
xxx $
- record change 1/2 to 'a'? [Ynesfdaq?] y
+ record change 1/2 to 'a'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -2,2 +3,3 @@
do not process $Id:
xxx $
+bar
- record change 2/2 to 'a'? [Ynesfdaq?] n
+ record change 2/2 to 'a'?
+ (enter ? for help) [Ynesfdaq?] n
$ hg identify
@@ -450,13 +452,15 @@
+foo
do not process $Id:
xxx $
- record change 1/2 to 'a'? [Ynesfdaq?] y
+ record change 1/2 to 'a'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -2,2 +3,3 @@
do not process $Id:
xxx $
+bar
- record change 2/2 to 'a'? [Ynesfdaq?] y
+ record change 2/2 to 'a'?
+ (enter ? for help) [Ynesfdaq?] y
File a should be clean
@@ -516,7 +520,8 @@
new file mode 100644
@@ -0,0 +1,1 @@
+$Id$
- record this change to 'r'? [Ynesfdaq?] y
+ record this change to 'r'?
+ (enter ? for help) [Ynesfdaq?] y
resolving manifests
patching file r
@@ -546,11 +551,13 @@
> EOF
diff --git a/i b/i
new file mode 100644
- examine changes to 'i'? [Ynesfdaq?] y
+ examine changes to 'i'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -0,0 +1,1 @@
+$Id$
- record this change to 'i'? [Ynesfdaq?] y
+ record this change to 'i'?
+ (enter ? for help) [Ynesfdaq?] y
resolving manifests
patching file i
@@ -1248,11 +1255,6 @@
Test restricted mode with unshelve
- $ cat <<EOF >> $HGRCPATH
- > [extensions]
- > shelve =
- > EOF
-
$ echo xxxx >> a
$ hg diff
diff -r 800511b3a22d a
--- a/tests/test-largefiles-misc.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-largefiles-misc.t Mon Jul 22 14:00:33 2019 -0400
@@ -957,8 +957,8 @@
$ hg merge
largefile f-different has a merge conflict
ancestor was 09d2af8dd22201dd8d48e5dcfcaed281ff9422c7
- keep (l)ocal e5fa44f2b31c1fb553b6021e7360d07d5d91ff5e or
- take (o)ther 7448d8798a4380162d4b56f9b452e2f6f9e24e7a? l
+ you can keep (l)ocal e5fa44f2b31c1fb553b6021e7360d07d5d91ff5e or take (o)ther 7448d8798a4380162d4b56f9b452e2f6f9e24e7a.
+ what do you want to do? l
getting changed largefiles
1 largefiles updated, 0 removed
0 files updated, 4 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-largefiles-update.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-largefiles-update.t Mon Jul 22 14:00:33 2019 -0400
@@ -133,8 +133,8 @@
> EOF
largefile large1 has a merge conflict
ancestor was 4669e532d5b2c093a78eca010077e708a071bb64
- keep (l)ocal e5bb990443d6a92aaf7223813720f7566c9dd05b or
- take (o)ther 58e24f733a964da346e2407a2bee99d9001184f5? o
+ you can keep (l)ocal e5bb990443d6a92aaf7223813720f7566c9dd05b or take (o)ther 58e24f733a964da346e2407a2bee99d9001184f5.
+ what do you want to do? o
merging normal1
warning: conflicts while merging normal1! (edit, then use 'hg resolve --mark')
getting changed largefiles
@@ -161,8 +161,8 @@
> EOF
largefile large1 has a merge conflict
ancestor was 4669e532d5b2c093a78eca010077e708a071bb64
- keep (l)ocal e5bb990443d6a92aaf7223813720f7566c9dd05b or
- take (o)ther 58e24f733a964da346e2407a2bee99d9001184f5? o
+ you can keep (l)ocal e5bb990443d6a92aaf7223813720f7566c9dd05b or take (o)ther 58e24f733a964da346e2407a2bee99d9001184f5.
+ what do you want to do? o
getting changed largefiles
large1: largefile 58e24f733a964da346e2407a2bee99d9001184f5 not available from file:/*/$TESTTMP/repo (glob)
0 largefiles updated, 0 removed
@@ -361,8 +361,8 @@
> EOF
largefile large1 has a merge conflict
ancestor was 4669e532d5b2c093a78eca010077e708a071bb64
- keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or
- take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b? o
+ you can keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b.
+ what do you want to do? o
getting changed largefiles
1 largefiles updated, 0 removed
1 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -380,8 +380,8 @@
$ hg update 3 --config debug.dirstate.delaywrite=2
largefile large1 has a merge conflict
ancestor was 4669e532d5b2c093a78eca010077e708a071bb64
- keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or
- take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b? l
+ you can keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b.
+ what do you want to do? l
1 files updated, 1 files merged, 0 files removed, 0 files unresolved
$ hg status -A large1
M large1
@@ -461,8 +461,8 @@
keep (l)argefile or use (n)ormal file? l
largefile large1 has a merge conflict
ancestor was 4669e532d5b2c093a78eca010077e708a071bb64
- keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or
- take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b? l
+ you can keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b.
+ what do you want to do? l
2 files updated, 1 files merged, 0 files removed, 0 files unresolved
updated to "d65e59e952a9: #5"
1 other heads for branch "default"
@@ -497,8 +497,8 @@
keep (l)argefile or use (n)ormal file? l
largefile large1 has a merge conflict
ancestor was 4669e532d5b2c093a78eca010077e708a071bb64
- keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or
- take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b? l
+ you can keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b.
+ what do you want to do? l
2 files updated, 1 files merged, 0 files removed, 0 files unresolved
updated to "d65e59e952a9: #5"
1 other heads for branch "default"
@@ -545,15 +545,17 @@
> l
> EOF
subrepository sub diverged (local revision: f74e50bd9e55, remote revision: d65e59e952a9)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [destination].
+ what do you want to do? m
subrepository sources for sub differ (in checked out version)
- use (l)ocal source (f74e50bd9e55) or (r)emote source (d65e59e952a9)? r
+ you can use (l)ocal source (f74e50bd9e55) or (r)emote source (d65e59e952a9).
+ what do you want to do? r
remote turned local largefile large2 into a normal file
keep (l)argefile or use (n)ormal file? l
largefile large1 has a merge conflict
ancestor was 4669e532d5b2c093a78eca010077e708a071bb64
- keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or
- take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b? l
+ you can keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b.
+ what do you want to do? l
2 files updated, 1 files merged, 0 files removed, 0 files unresolved
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -587,8 +589,8 @@
rebasing 1:72518492caa6 "#1"
largefile large1 has a merge conflict
ancestor was 4669e532d5b2c093a78eca010077e708a071bb64
- keep (l)ocal e5bb990443d6a92aaf7223813720f7566c9dd05b or
- take (o)ther 58e24f733a964da346e2407a2bee99d9001184f5? o
+ you can keep (l)ocal e5bb990443d6a92aaf7223813720f7566c9dd05b or take (o)ther 58e24f733a964da346e2407a2bee99d9001184f5.
+ what do you want to do? o
merging normal1
warning: conflicts while merging normal1! (edit, then use 'hg resolve --mark')
unresolved conflicts (see hg resolve, then hg rebase --continue)
@@ -612,8 +614,8 @@
rebasing 1:72518492caa6 "#1"
rebasing 4:07d6153b5c04 "#4"
file '.hglf/large1' was deleted in other [source] but was modified in local [dest].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? c
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? c
$ hg diff -c "tip~1" --nodates .hglf/large1 | grep '^[+-][0-9a-z]'
-e5bb990443d6a92aaf7223813720f7566c9dd05b
--- a/tests/test-largefiles.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-largefiles.t Mon Jul 22 14:00:33 2019 -0400
@@ -1701,8 +1701,8 @@
$ hg merge
largefile sub/large4 has a merge conflict
ancestor was 971fb41e78fea4f8e0ba5244784239371cb00591
- keep (l)ocal d846f26643bfa8ec210be40cc93cc6b7ff1128ea or
- take (o)ther e166e74c7303192238d60af5a9c4ce9bef0b7928? l
+ you can keep (l)ocal d846f26643bfa8ec210be40cc93cc6b7ff1128ea or take (o)ther e166e74c7303192238d60af5a9c4ce9bef0b7928.
+ what do you want to do? l
getting changed largefiles
1 largefiles updated, 0 removed
3 files updated, 1 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-lfconvert.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-lfconvert.t Mon Jul 22 14:00:33 2019 -0400
@@ -128,7 +128,9 @@
$ hg merge
tool internal:merge (for pattern stuff/maybelarge.dat) can't handle binary
no tool found to merge stuff/maybelarge.dat
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for stuff/maybelarge.dat? u
+ file 'stuff/maybelarge.dat' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do? u
merging sub/normal2 and stuff/normal2 to stuff/normal2
0 files updated, 1 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
@@ -158,7 +160,7 @@
initializing destination largefiles-repo
$ cd largefiles-repo
$ hg log -G --template "{rev}:{node|short} {desc|firstline}\n"
- o 5:8e05f5f2b77e merge
+ o 5:9cc5aa7204f0 merge
|\
| o 4:a5a02de7a8e4 remove large, normal3
| |
@@ -249,7 +251,7 @@
3 remove large, normal3
2 merge
1 add anotherlarge (should be a largefile)
- 0 Added tag mytag for changeset abacddda7028
+ 0 Added tag mytag for changeset 17126745edfd
$ cd ../normal-repo
$ cat >> .hg/hgrc <<EOF
> [extensions]
@@ -302,7 +304,7 @@
3 remove large, normal3
2 merge
1 add anotherlarge (should be a largefile)
- 0 Added tag mytag for changeset abacddda7028
+ 0 Added tag mytag for changeset 17126745edfd
$ hg -R largefiles-repo-hg log -G --template "{rev}:{node|short} {desc|firstline}\n"
o 7:2f08f66459b7 Added tag mytag for changeset 17126745edfd
@@ -372,7 +374,7 @@
4 remove large, normal3
3 merge
2 add anotherlarge (should be a largefile)
- 1 Added tag mytag for changeset abacddda7028
+ 1 Added tag mytag for changeset 17126745edfd
0 change branch name only
Ensure empty commits aren't lost in the conversion
--- a/tests/test-lfs-serve.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-lfs-serve.t Mon Jul 22 14:00:33 2019 -0400
@@ -537,8 +537,55 @@
$ hg clone -qU http://localhost:$HGPORT $TESTTMP/bulkfetch
+Cat doesn't prefetch unless data is needed (e.g. '-T {rawdata}' doesn't need it)
+
+ $ hg --cwd $TESTTMP/bulkfetch cat -vr tip lfspair1.bin -T '{rawdata}\n{path}\n'
+ lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
+ size 20
+ x-is-binary 0
+
+ lfspair1.bin
+
+ $ hg --cwd $TESTTMP/bulkfetch cat -vr tip lfspair1.bin -T json
+ lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
+ [lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
+ lfs: downloading cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 (20 bytes)
+ lfs: processed: cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
+ lfs: downloaded 1 files (20 bytes)
+ lfs: found cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 in the local lfs store
+
+ {
+ "data": "this is an lfs file\n",
+ "path": "lfspair1.bin",
+ "rawdata": "version https://git-lfs.github.com/spec/v1\noid sha256:cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782\nsize 20\nx-is-binary 0\n"
+ }
+ ]
+
+ $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
+
+ $ hg --cwd $TESTTMP/bulkfetch cat -vr tip lfspair1.bin -T '{data}\n'
+ lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
+ lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
+ lfs: downloading cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 (20 bytes)
+ lfs: processed: cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
+ lfs: downloaded 1 files (20 bytes)
+ lfs: found cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 in the local lfs store
+ this is an lfs file
+
+ $ hg --cwd $TESTTMP/bulkfetch cat -vr tip lfspair2.bin
+ lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
+ lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
+ lfs: downloading d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e (24 bytes)
+ lfs: processed: d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e
+ lfs: downloaded 1 files (24 bytes)
+ lfs: found d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e in the local lfs store
+ this is an lfs file too
+
Export will prefetch all needed files across all needed revisions
+ $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
$ hg -R $TESTTMP/bulkfetch -v export -r 0:tip -o all.export
lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
exporting patches:
--- a/tests/test-log.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-log.t Mon Jul 22 14:00:33 2019 -0400
@@ -1081,7 +1081,7 @@
$ cd ..
log --follow --patch FILE in repository where linkrev isn't trustworthy
-(issue5376)
+(issue5376, issue6124)
$ hg init follow-dup
$ cd follow-dup
@@ -1129,6 +1129,16 @@
@@ -0,0 +1,1 @@
+0
+ $ hg log -pr . a
+ === 3: a3
+ diff -r 4ea02ba94d66 -r e7a6331a34f0 a
+ --- a/a
+ +++ b/a
+ @@ -1,2 +1,3 @@
+ 0
+ 1
+ +3
+
fctx.introrev() == 2, but fctx.linkrev() == 1
@@ -1150,6 +1160,9 @@
+0
+BROKEN: should show the same diff as for rev 2 above
+ $ hg log -pr . a
+
$ cd ..
Multiple copy sources of a file:
--- a/tests/test-match.py Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-match.py Mon Jul 22 14:00:33 2019 -0400
@@ -13,36 +13,36 @@
def testVisitdir(self):
m = matchmod.basematcher()
- self.assertTrue(m.visitdir(b'.'))
+ self.assertTrue(m.visitdir(b''))
self.assertTrue(m.visitdir(b'dir'))
def testVisitchildrenset(self):
m = matchmod.basematcher()
- self.assertEqual(m.visitchildrenset(b'.'), b'this')
+ self.assertEqual(m.visitchildrenset(b''), b'this')
self.assertEqual(m.visitchildrenset(b'dir'), b'this')
class AlwaysMatcherTests(unittest.TestCase):
def testVisitdir(self):
m = matchmod.alwaysmatcher()
- self.assertEqual(m.visitdir(b'.'), b'all')
+ self.assertEqual(m.visitdir(b''), b'all')
self.assertEqual(m.visitdir(b'dir'), b'all')
def testVisitchildrenset(self):
m = matchmod.alwaysmatcher()
- self.assertEqual(m.visitchildrenset(b'.'), b'all')
+ self.assertEqual(m.visitchildrenset(b''), b'all')
self.assertEqual(m.visitchildrenset(b'dir'), b'all')
class NeverMatcherTests(unittest.TestCase):
def testVisitdir(self):
m = matchmod.nevermatcher()
- self.assertFalse(m.visitdir(b'.'))
+ self.assertFalse(m.visitdir(b''))
self.assertFalse(m.visitdir(b'dir'))
def testVisitchildrenset(self):
m = matchmod.nevermatcher()
- self.assertEqual(m.visitchildrenset(b'.'), set())
+ self.assertEqual(m.visitchildrenset(b''), set())
self.assertEqual(m.visitchildrenset(b'dir'), set())
class PredicateMatcherTests(unittest.TestCase):
@@ -51,12 +51,12 @@
def testVisitdir(self):
m = matchmod.predicatematcher(lambda *a: False)
- self.assertTrue(m.visitdir(b'.'))
+ self.assertTrue(m.visitdir(b''))
self.assertTrue(m.visitdir(b'dir'))
def testVisitchildrenset(self):
m = matchmod.predicatematcher(lambda *a: False)
- self.assertEqual(m.visitchildrenset(b'.'), b'this')
+ self.assertEqual(m.visitchildrenset(b''), b'this')
self.assertEqual(m.visitchildrenset(b'dir'), b'this')
class PatternMatcherTests(unittest.TestCase):
@@ -64,7 +64,7 @@
def testVisitdirPrefix(self):
m = matchmod.match(b'x', b'', patterns=[b'path:dir/subdir'])
assert isinstance(m, matchmod.patternmatcher)
- self.assertTrue(m.visitdir(b'.'))
+ self.assertTrue(m.visitdir(b''))
self.assertTrue(m.visitdir(b'dir'))
self.assertEqual(m.visitdir(b'dir/subdir'), b'all')
# OPT: This should probably be 'all' if its parent is?
@@ -74,7 +74,7 @@
def testVisitchildrensetPrefix(self):
m = matchmod.match(b'x', b'', patterns=[b'path:dir/subdir'])
assert isinstance(m, matchmod.patternmatcher)
- self.assertEqual(m.visitchildrenset(b'.'), b'this')
+ self.assertEqual(m.visitchildrenset(b''), b'this')
self.assertEqual(m.visitchildrenset(b'dir'), b'this')
self.assertEqual(m.visitchildrenset(b'dir/subdir'), b'all')
# OPT: This should probably be 'all' if its parent is?
@@ -84,28 +84,28 @@
def testVisitdirRootfilesin(self):
m = matchmod.match(b'x', b'', patterns=[b'rootfilesin:dir/subdir'])
assert isinstance(m, matchmod.patternmatcher)
- self.assertTrue(m.visitdir(b'.'))
self.assertFalse(m.visitdir(b'dir/subdir/x'))
self.assertFalse(m.visitdir(b'folder'))
# FIXME: These should probably be True.
+ self.assertFalse(m.visitdir(b''))
self.assertFalse(m.visitdir(b'dir'))
self.assertFalse(m.visitdir(b'dir/subdir'))
def testVisitchildrensetRootfilesin(self):
m = matchmod.match(b'x', b'', patterns=[b'rootfilesin:dir/subdir'])
assert isinstance(m, matchmod.patternmatcher)
- self.assertEqual(m.visitchildrenset(b'.'), b'this')
self.assertEqual(m.visitchildrenset(b'dir/subdir/x'), set())
self.assertEqual(m.visitchildrenset(b'folder'), set())
- # FIXME: These should probably be {'subdir'} and 'this', respectively,
- # or at least 'this' and 'this'.
+ # FIXME: These should probably be {'dir'}, {'subdir'} and 'this',
+ # respectively, or at least 'this' for all three.
+ self.assertEqual(m.visitchildrenset(b''), set())
self.assertEqual(m.visitchildrenset(b'dir'), set())
self.assertEqual(m.visitchildrenset(b'dir/subdir'), set())
def testVisitdirGlob(self):
m = matchmod.match(b'x', b'', patterns=[b'glob:dir/z*'])
assert isinstance(m, matchmod.patternmatcher)
- self.assertTrue(m.visitdir(b'.'))
+ self.assertTrue(m.visitdir(b''))
self.assertTrue(m.visitdir(b'dir'))
self.assertFalse(m.visitdir(b'folder'))
# OPT: these should probably be False.
@@ -115,7 +115,7 @@
def testVisitchildrensetGlob(self):
m = matchmod.match(b'x', b'', patterns=[b'glob:dir/z*'])
assert isinstance(m, matchmod.patternmatcher)
- self.assertEqual(m.visitchildrenset(b'.'), b'this')
+ self.assertEqual(m.visitchildrenset(b''), b'this')
self.assertEqual(m.visitchildrenset(b'folder'), set())
self.assertEqual(m.visitchildrenset(b'dir'), b'this')
# OPT: these should probably be set().
@@ -127,7 +127,7 @@
def testVisitdirPrefix(self):
m = matchmod.match(b'x', b'', include=[b'path:dir/subdir'])
assert isinstance(m, matchmod.includematcher)
- self.assertTrue(m.visitdir(b'.'))
+ self.assertTrue(m.visitdir(b''))
self.assertTrue(m.visitdir(b'dir'))
self.assertEqual(m.visitdir(b'dir/subdir'), b'all')
# OPT: This should probably be 'all' if its parent is?
@@ -137,7 +137,7 @@
def testVisitchildrensetPrefix(self):
m = matchmod.match(b'x', b'', include=[b'path:dir/subdir'])
assert isinstance(m, matchmod.includematcher)
- self.assertEqual(m.visitchildrenset(b'.'), {b'dir'})
+ self.assertEqual(m.visitchildrenset(b''), {b'dir'})
self.assertEqual(m.visitchildrenset(b'dir'), {b'subdir'})
self.assertEqual(m.visitchildrenset(b'dir/subdir'), b'all')
# OPT: This should probably be 'all' if its parent is?
@@ -147,7 +147,7 @@
def testVisitdirRootfilesin(self):
m = matchmod.match(b'x', b'', include=[b'rootfilesin:dir/subdir'])
assert isinstance(m, matchmod.includematcher)
- self.assertTrue(m.visitdir(b'.'))
+ self.assertTrue(m.visitdir(b''))
self.assertTrue(m.visitdir(b'dir'))
self.assertTrue(m.visitdir(b'dir/subdir'))
self.assertFalse(m.visitdir(b'dir/subdir/x'))
@@ -156,7 +156,7 @@
def testVisitchildrensetRootfilesin(self):
m = matchmod.match(b'x', b'', include=[b'rootfilesin:dir/subdir'])
assert isinstance(m, matchmod.includematcher)
- self.assertEqual(m.visitchildrenset(b'.'), {b'dir'})
+ self.assertEqual(m.visitchildrenset(b''), {b'dir'})
self.assertEqual(m.visitchildrenset(b'dir'), {b'subdir'})
self.assertEqual(m.visitchildrenset(b'dir/subdir'), b'this')
self.assertEqual(m.visitchildrenset(b'dir/subdir/x'), set())
@@ -165,7 +165,7 @@
def testVisitdirGlob(self):
m = matchmod.match(b'x', b'', include=[b'glob:dir/z*'])
assert isinstance(m, matchmod.includematcher)
- self.assertTrue(m.visitdir(b'.'))
+ self.assertTrue(m.visitdir(b''))
self.assertTrue(m.visitdir(b'dir'))
self.assertFalse(m.visitdir(b'folder'))
# OPT: these should probably be False.
@@ -175,7 +175,7 @@
def testVisitchildrensetGlob(self):
m = matchmod.match(b'x', b'', include=[b'glob:dir/z*'])
assert isinstance(m, matchmod.includematcher)
- self.assertEqual(m.visitchildrenset(b'.'), {b'dir'})
+ self.assertEqual(m.visitchildrenset(b''), {b'dir'})
self.assertEqual(m.visitchildrenset(b'folder'), set())
self.assertEqual(m.visitchildrenset(b'dir'), b'this')
# OPT: these should probably be set().
@@ -187,7 +187,7 @@
def testVisitdir(self):
m = matchmod.exact(files=[b'dir/subdir/foo.txt'])
assert isinstance(m, matchmod.exactmatcher)
- self.assertTrue(m.visitdir(b'.'))
+ self.assertTrue(m.visitdir(b''))
self.assertTrue(m.visitdir(b'dir'))
self.assertTrue(m.visitdir(b'dir/subdir'))
self.assertFalse(m.visitdir(b'dir/subdir/foo.txt'))
@@ -198,7 +198,7 @@
def testVisitchildrenset(self):
m = matchmod.exact(files=[b'dir/subdir/foo.txt'])
assert isinstance(m, matchmod.exactmatcher)
- self.assertEqual(m.visitchildrenset(b'.'), {b'dir'})
+ self.assertEqual(m.visitchildrenset(b''), {b'dir'})
self.assertEqual(m.visitchildrenset(b'dir'), {b'subdir'})
self.assertEqual(m.visitchildrenset(b'dir/subdir'), {b'foo.txt'})
self.assertEqual(m.visitchildrenset(b'dir/subdir/x'), set())
@@ -212,7 +212,7 @@
# no file in a/b/c
b'a/b/c/d/file4.txt'])
assert isinstance(m, matchmod.exactmatcher)
- self.assertEqual(m.visitchildrenset(b'.'), {b'a', b'rootfile.txt'})
+ self.assertEqual(m.visitchildrenset(b''), {b'a', b'rootfile.txt'})
self.assertEqual(m.visitchildrenset(b'a'), {b'b', b'file1.txt'})
self.assertEqual(m.visitchildrenset(b'a/b'), {b'c', b'file2.txt'})
self.assertEqual(m.visitchildrenset(b'a/b/c'), {b'd'})
@@ -227,7 +227,7 @@
m2 = matchmod.alwaysmatcher()
dm = matchmod.differencematcher(m1, m2)
# dm should be equivalent to a nevermatcher.
- self.assertFalse(dm.visitdir(b'.'))
+ self.assertFalse(dm.visitdir(b''))
self.assertFalse(dm.visitdir(b'dir'))
self.assertFalse(dm.visitdir(b'dir/subdir'))
self.assertFalse(dm.visitdir(b'dir/subdir/z'))
@@ -240,7 +240,7 @@
m2 = matchmod.alwaysmatcher()
dm = matchmod.differencematcher(m1, m2)
# dm should be equivalent to a nevermatcher.
- self.assertEqual(dm.visitchildrenset(b'.'), set())
+ self.assertEqual(dm.visitchildrenset(b''), set())
self.assertEqual(dm.visitchildrenset(b'dir'), set())
self.assertEqual(dm.visitchildrenset(b'dir/subdir'), set())
self.assertEqual(dm.visitchildrenset(b'dir/subdir/z'), set())
@@ -258,7 +258,7 @@
# assertTrue does NOT verify that it's a bool, just that it's truthy.
# While we may want to eventually make these return 'all', they should
# not currently do so.
- self.assertEqual(dm.visitdir(b'.'), b'all')
+ self.assertEqual(dm.visitdir(b''), b'all')
self.assertEqual(dm.visitdir(b'dir'), b'all')
self.assertEqual(dm.visitdir(b'dir/subdir'), b'all')
self.assertEqual(dm.visitdir(b'dir/subdir/z'), b'all')
@@ -271,7 +271,7 @@
m2 = matchmod.nevermatcher()
dm = matchmod.differencematcher(m1, m2)
# dm should be equivalent to a alwaysmatcher.
- self.assertEqual(dm.visitchildrenset(b'.'), b'all')
+ self.assertEqual(dm.visitchildrenset(b''), b'all')
self.assertEqual(dm.visitchildrenset(b'dir'), b'all')
self.assertEqual(dm.visitchildrenset(b'dir/subdir'), b'all')
self.assertEqual(dm.visitchildrenset(b'dir/subdir/z'), b'all')
@@ -283,7 +283,7 @@
m1 = matchmod.alwaysmatcher()
m2 = matchmod.match(b'', b'', patterns=[b'path:dir/subdir'])
dm = matchmod.differencematcher(m1, m2)
- self.assertEqual(dm.visitdir(b'.'), True)
+ self.assertEqual(dm.visitdir(b''), True)
self.assertEqual(dm.visitdir(b'dir'), True)
self.assertFalse(dm.visitdir(b'dir/subdir'))
# OPT: We should probably return False for these; we don't because
@@ -298,7 +298,7 @@
m1 = matchmod.alwaysmatcher()
m2 = matchmod.match(b'', b'', patterns=[b'path:dir/subdir'])
dm = matchmod.differencematcher(m1, m2)
- self.assertEqual(dm.visitchildrenset(b'.'), b'this')
+ self.assertEqual(dm.visitchildrenset(b''), b'this')
self.assertEqual(dm.visitchildrenset(b'dir'), b'this')
self.assertEqual(dm.visitchildrenset(b'dir/subdir'), set())
self.assertEqual(dm.visitchildrenset(b'dir/foo'), b'all')
@@ -315,7 +315,7 @@
m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
m2 = matchmod.match(b'', b'', include=[b'rootfilesin:dir'])
dm = matchmod.differencematcher(m1, m2)
- self.assertEqual(dm.visitdir(b'.'), True)
+ self.assertEqual(dm.visitdir(b''), True)
self.assertEqual(dm.visitdir(b'dir'), True)
self.assertEqual(dm.visitdir(b'dir/subdir'), b'all')
self.assertFalse(dm.visitdir(b'dir/foo'))
@@ -330,7 +330,7 @@
m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
m2 = matchmod.match(b'', b'', include=[b'rootfilesin:dir'])
dm = matchmod.differencematcher(m1, m2)
- self.assertEqual(dm.visitchildrenset(b'.'), {b'dir'})
+ self.assertEqual(dm.visitchildrenset(b''), {b'dir'})
self.assertEqual(dm.visitchildrenset(b'dir'), {b'subdir'})
self.assertEqual(dm.visitchildrenset(b'dir/subdir'), b'all')
self.assertEqual(dm.visitchildrenset(b'dir/foo'), set())
@@ -348,7 +348,7 @@
m2 = matchmod.alwaysmatcher()
im = matchmod.intersectmatchers(m1, m2)
# im should be equivalent to a alwaysmatcher.
- self.assertEqual(im.visitdir(b'.'), b'all')
+ self.assertEqual(im.visitdir(b''), b'all')
self.assertEqual(im.visitdir(b'dir'), b'all')
self.assertEqual(im.visitdir(b'dir/subdir'), b'all')
self.assertEqual(im.visitdir(b'dir/subdir/z'), b'all')
@@ -361,7 +361,7 @@
m2 = matchmod.alwaysmatcher()
im = matchmod.intersectmatchers(m1, m2)
# im should be equivalent to a alwaysmatcher.
- self.assertEqual(im.visitchildrenset(b'.'), b'all')
+ self.assertEqual(im.visitchildrenset(b''), b'all')
self.assertEqual(im.visitchildrenset(b'dir'), b'all')
self.assertEqual(im.visitchildrenset(b'dir/subdir'), b'all')
self.assertEqual(im.visitchildrenset(b'dir/subdir/z'), b'all')
@@ -374,7 +374,7 @@
m2 = matchmod.nevermatcher()
im = matchmod.intersectmatchers(m1, m2)
# im should be equivalent to a nevermatcher.
- self.assertFalse(im.visitdir(b'.'))
+ self.assertFalse(im.visitdir(b''))
self.assertFalse(im.visitdir(b'dir'))
self.assertFalse(im.visitdir(b'dir/subdir'))
self.assertFalse(im.visitdir(b'dir/subdir/z'))
@@ -387,7 +387,7 @@
m2 = matchmod.nevermatcher()
im = matchmod.intersectmatchers(m1, m2)
# im should be equivalent to a nevermqtcher.
- self.assertEqual(im.visitchildrenset(b'.'), set())
+ self.assertEqual(im.visitchildrenset(b''), set())
self.assertEqual(im.visitchildrenset(b'dir'), set())
self.assertEqual(im.visitchildrenset(b'dir/subdir'), set())
self.assertEqual(im.visitchildrenset(b'dir/subdir/z'), set())
@@ -399,7 +399,7 @@
m1 = matchmod.alwaysmatcher()
m2 = matchmod.match(b'', b'', patterns=[b'path:dir/subdir'])
im = matchmod.intersectmatchers(m1, m2)
- self.assertEqual(im.visitdir(b'.'), True)
+ self.assertEqual(im.visitdir(b''), True)
self.assertEqual(im.visitdir(b'dir'), True)
self.assertEqual(im.visitdir(b'dir/subdir'), b'all')
self.assertFalse(im.visitdir(b'dir/foo'))
@@ -414,7 +414,7 @@
m1 = matchmod.alwaysmatcher()
m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
im = matchmod.intersectmatchers(m1, m2)
- self.assertEqual(im.visitchildrenset(b'.'), {b'dir'})
+ self.assertEqual(im.visitchildrenset(b''), {b'dir'})
self.assertEqual(im.visitchildrenset(b'dir'), {b'subdir'})
self.assertEqual(im.visitchildrenset(b'dir/subdir'), b'all')
self.assertEqual(im.visitchildrenset(b'dir/foo'), set())
@@ -429,7 +429,7 @@
m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
m2 = matchmod.match(b'', b'', include=[b'rootfilesin:dir'])
im = matchmod.intersectmatchers(m1, m2)
- self.assertEqual(im.visitdir(b'.'), True)
+ self.assertEqual(im.visitdir(b''), True)
self.assertEqual(im.visitdir(b'dir'), True)
self.assertFalse(im.visitdir(b'dir/subdir'))
self.assertFalse(im.visitdir(b'dir/foo'))
@@ -441,7 +441,7 @@
m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
m2 = matchmod.match(b'', b'', include=[b'rootfilesin:dir'])
im = matchmod.intersectmatchers(m1, m2)
- self.assertEqual(im.visitchildrenset(b'.'), {b'dir'})
+ self.assertEqual(im.visitchildrenset(b''), {b'dir'})
self.assertEqual(im.visitchildrenset(b'dir'), b'this')
self.assertEqual(im.visitchildrenset(b'dir/subdir'), set())
self.assertEqual(im.visitchildrenset(b'dir/foo'), set())
@@ -456,7 +456,7 @@
m2 = matchmod.match(b'', b'', include=[b'path:folder'])
im = matchmod.intersectmatchers(m1, m2)
# FIXME: is True correct here?
- self.assertEqual(im.visitdir(b'.'), True)
+ self.assertEqual(im.visitdir(b''), True)
self.assertFalse(im.visitdir(b'dir'))
self.assertFalse(im.visitdir(b'dir/subdir'))
self.assertFalse(im.visitdir(b'dir/foo'))
@@ -469,7 +469,7 @@
m2 = matchmod.match(b'', b'', include=[b'path:folder'])
im = matchmod.intersectmatchers(m1, m2)
# FIXME: is set() correct here?
- self.assertEqual(im.visitchildrenset(b'.'), set())
+ self.assertEqual(im.visitchildrenset(b''), set())
self.assertEqual(im.visitchildrenset(b'dir'), set())
self.assertEqual(im.visitchildrenset(b'dir/subdir'), set())
self.assertEqual(im.visitchildrenset(b'dir/foo'), set())
@@ -483,7 +483,7 @@
m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir/x'])
m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
im = matchmod.intersectmatchers(m1, m2)
- self.assertEqual(im.visitdir(b'.'), True)
+ self.assertEqual(im.visitdir(b''), True)
self.assertEqual(im.visitdir(b'dir'), True)
self.assertEqual(im.visitdir(b'dir/subdir'), True)
self.assertFalse(im.visitdir(b'dir/foo'))
@@ -496,7 +496,7 @@
m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir/x'])
m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
im = matchmod.intersectmatchers(m1, m2)
- self.assertEqual(im.visitchildrenset(b'.'), {b'dir'})
+ self.assertEqual(im.visitchildrenset(b''), {b'dir'})
self.assertEqual(im.visitchildrenset(b'dir'), {b'subdir'})
self.assertEqual(im.visitchildrenset(b'dir/subdir'), {b'x'})
self.assertEqual(im.visitchildrenset(b'dir/foo'), set())
@@ -512,7 +512,7 @@
m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir/z'])
im = matchmod.intersectmatchers(m1, m2)
# OPT: these next three could probably be False as well.
- self.assertEqual(im.visitdir(b'.'), True)
+ self.assertEqual(im.visitdir(b''), True)
self.assertEqual(im.visitdir(b'dir'), True)
self.assertEqual(im.visitdir(b'dir/subdir'), True)
self.assertFalse(im.visitdir(b'dir/foo'))
@@ -525,7 +525,7 @@
m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir/z'])
im = matchmod.intersectmatchers(m1, m2)
# OPT: these next two could probably be set() as well.
- self.assertEqual(im.visitchildrenset(b'.'), {b'dir'})
+ self.assertEqual(im.visitchildrenset(b''), {b'dir'})
self.assertEqual(im.visitchildrenset(b'dir'), {b'subdir'})
self.assertEqual(im.visitchildrenset(b'dir/subdir'), set())
self.assertEqual(im.visitchildrenset(b'dir/foo'), set())
@@ -540,7 +540,7 @@
m2 = matchmod.alwaysmatcher()
um = matchmod.unionmatcher([m1, m2])
# um should be equivalent to a alwaysmatcher.
- self.assertEqual(um.visitdir(b'.'), b'all')
+ self.assertEqual(um.visitdir(b''), b'all')
self.assertEqual(um.visitdir(b'dir'), b'all')
self.assertEqual(um.visitdir(b'dir/subdir'), b'all')
self.assertEqual(um.visitdir(b'dir/subdir/z'), b'all')
@@ -553,7 +553,7 @@
m2 = matchmod.alwaysmatcher()
um = matchmod.unionmatcher([m1, m2])
# um should be equivalent to a alwaysmatcher.
- self.assertEqual(um.visitchildrenset(b'.'), b'all')
+ self.assertEqual(um.visitchildrenset(b''), b'all')
self.assertEqual(um.visitchildrenset(b'dir'), b'all')
self.assertEqual(um.visitchildrenset(b'dir/subdir'), b'all')
self.assertEqual(um.visitchildrenset(b'dir/subdir/z'), b'all')
@@ -566,7 +566,7 @@
m2 = matchmod.alwaysmatcher()
um = matchmod.unionmatcher([m1, m2])
# um should be equivalent to a alwaysmatcher.
- self.assertEqual(um.visitdir(b'.'), b'all')
+ self.assertEqual(um.visitdir(b''), b'all')
self.assertEqual(um.visitdir(b'dir'), b'all')
self.assertEqual(um.visitdir(b'dir/subdir'), b'all')
self.assertEqual(um.visitdir(b'dir/subdir/z'), b'all')
@@ -579,7 +579,7 @@
m2 = matchmod.alwaysmatcher()
um = matchmod.unionmatcher([m1, m2])
# um should be equivalent to a alwaysmatcher.
- self.assertEqual(um.visitchildrenset(b'.'), b'all')
+ self.assertEqual(um.visitchildrenset(b''), b'all')
self.assertEqual(um.visitchildrenset(b'dir'), b'all')
self.assertEqual(um.visitchildrenset(b'dir/subdir'), b'all')
self.assertEqual(um.visitchildrenset(b'dir/subdir/z'), b'all')
@@ -592,7 +592,7 @@
m2 = matchmod.nevermatcher()
um = matchmod.unionmatcher([m1, m2])
# um should be equivalent to a alwaysmatcher.
- self.assertEqual(um.visitdir(b'.'), b'all')
+ self.assertEqual(um.visitdir(b''), b'all')
self.assertEqual(um.visitdir(b'dir'), b'all')
self.assertEqual(um.visitdir(b'dir/subdir'), b'all')
self.assertEqual(um.visitdir(b'dir/subdir/z'), b'all')
@@ -605,7 +605,7 @@
m2 = matchmod.nevermatcher()
um = matchmod.unionmatcher([m1, m2])
# um should be equivalent to a alwaysmatcher.
- self.assertEqual(um.visitchildrenset(b'.'), b'all')
+ self.assertEqual(um.visitchildrenset(b''), b'all')
self.assertEqual(um.visitchildrenset(b'dir'), b'all')
self.assertEqual(um.visitchildrenset(b'dir/subdir'), b'all')
self.assertEqual(um.visitchildrenset(b'dir/subdir/z'), b'all')
@@ -617,7 +617,7 @@
m1 = matchmod.alwaysmatcher()
m2 = matchmod.match(b'', b'', patterns=[b'path:dir/subdir'])
um = matchmod.unionmatcher([m1, m2])
- self.assertEqual(um.visitdir(b'.'), b'all')
+ self.assertEqual(um.visitdir(b''), b'all')
self.assertEqual(um.visitdir(b'dir'), b'all')
self.assertEqual(um.visitdir(b'dir/subdir'), b'all')
self.assertEqual(um.visitdir(b'dir/foo'), b'all')
@@ -629,7 +629,7 @@
m1 = matchmod.alwaysmatcher()
m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
um = matchmod.unionmatcher([m1, m2])
- self.assertEqual(um.visitchildrenset(b'.'), b'all')
+ self.assertEqual(um.visitchildrenset(b''), b'all')
self.assertEqual(um.visitchildrenset(b'dir'), b'all')
self.assertEqual(um.visitchildrenset(b'dir/subdir'), b'all')
self.assertEqual(um.visitchildrenset(b'dir/foo'), b'all')
@@ -643,7 +643,7 @@
m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
m2 = matchmod.match(b'', b'', include=[b'rootfilesin:dir'])
um = matchmod.unionmatcher([m1, m2])
- self.assertEqual(um.visitdir(b'.'), True)
+ self.assertEqual(um.visitdir(b''), True)
self.assertEqual(um.visitdir(b'dir'), True)
self.assertEqual(um.visitdir(b'dir/subdir'), b'all')
self.assertFalse(um.visitdir(b'dir/foo'))
@@ -656,7 +656,7 @@
m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
m2 = matchmod.match(b'', b'', include=[b'rootfilesin:dir'])
um = matchmod.unionmatcher([m1, m2])
- self.assertEqual(um.visitchildrenset(b'.'), {b'dir'})
+ self.assertEqual(um.visitchildrenset(b''), {b'dir'})
self.assertEqual(um.visitchildrenset(b'dir'), b'this')
self.assertEqual(um.visitchildrenset(b'dir/subdir'), b'all')
self.assertEqual(um.visitchildrenset(b'dir/foo'), set())
@@ -671,7 +671,7 @@
m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
m2 = matchmod.match(b'', b'', include=[b'path:folder'])
um = matchmod.unionmatcher([m1, m2])
- self.assertEqual(um.visitdir(b'.'), True)
+ self.assertEqual(um.visitdir(b''), True)
self.assertEqual(um.visitdir(b'dir'), True)
self.assertEqual(um.visitdir(b'dir/subdir'), b'all')
self.assertFalse(um.visitdir(b'dir/foo'))
@@ -684,7 +684,7 @@
m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
m2 = matchmod.match(b'', b'', include=[b'path:folder'])
um = matchmod.unionmatcher([m1, m2])
- self.assertEqual(um.visitchildrenset(b'.'), {b'folder', b'dir'})
+ self.assertEqual(um.visitchildrenset(b''), {b'folder', b'dir'})
self.assertEqual(um.visitchildrenset(b'dir'), {b'subdir'})
self.assertEqual(um.visitchildrenset(b'dir/subdir'), b'all')
self.assertEqual(um.visitchildrenset(b'dir/foo'), set())
@@ -699,7 +699,7 @@
m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir/x'])
m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
um = matchmod.unionmatcher([m1, m2])
- self.assertEqual(um.visitdir(b'.'), True)
+ self.assertEqual(um.visitdir(b''), True)
self.assertEqual(um.visitdir(b'dir'), True)
self.assertEqual(um.visitdir(b'dir/subdir'), b'all')
self.assertFalse(um.visitdir(b'dir/foo'))
@@ -712,7 +712,7 @@
m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir/x'])
m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
um = matchmod.unionmatcher([m1, m2])
- self.assertEqual(um.visitchildrenset(b'.'), {b'dir'})
+ self.assertEqual(um.visitchildrenset(b''), {b'dir'})
self.assertEqual(um.visitchildrenset(b'dir'), {b'subdir'})
self.assertEqual(um.visitchildrenset(b'dir/subdir'), b'all')
self.assertEqual(um.visitchildrenset(b'dir/foo'), set())
@@ -728,7 +728,7 @@
m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir/z'])
um = matchmod.unionmatcher([m1, m2])
# OPT: these next three could probably be False as well.
- self.assertEqual(um.visitdir(b'.'), True)
+ self.assertEqual(um.visitdir(b''), True)
self.assertEqual(um.visitdir(b'dir'), True)
self.assertEqual(um.visitdir(b'dir/subdir'), True)
self.assertFalse(um.visitdir(b'dir/foo'))
@@ -740,7 +740,7 @@
m1 = matchmod.match(b'', b'', include=[b'path:dir/subdir/x'])
m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir/z'])
um = matchmod.unionmatcher([m1, m2])
- self.assertEqual(um.visitchildrenset(b'.'), {b'dir'})
+ self.assertEqual(um.visitchildrenset(b''), {b'dir'})
self.assertEqual(um.visitchildrenset(b'dir'), {b'subdir'})
self.assertEqual(um.visitchildrenset(b'dir/subdir'), {b'x', b'z'})
self.assertEqual(um.visitchildrenset(b'dir/foo'), set())
@@ -754,7 +754,7 @@
m = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
sm = matchmod.subdirmatcher(b'dir', m)
- self.assertEqual(sm.visitdir(b'.'), True)
+ self.assertEqual(sm.visitdir(b''), True)
self.assertEqual(sm.visitdir(b'subdir'), b'all')
# OPT: These next two should probably be 'all' not True.
self.assertEqual(sm.visitdir(b'subdir/x'), True)
@@ -765,7 +765,7 @@
m = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
sm = matchmod.subdirmatcher(b'dir', m)
- self.assertEqual(sm.visitchildrenset(b'.'), {b'subdir'})
+ self.assertEqual(sm.visitchildrenset(b''), {b'subdir'})
self.assertEqual(sm.visitchildrenset(b'subdir'), b'all')
# OPT: These next two should probably be 'all' not 'this'.
self.assertEqual(sm.visitchildrenset(b'subdir/x'), b'this')
@@ -795,12 +795,12 @@
self.assertEqual(bool(pm(b'd/e/b.txt')), False)
self.assertEqual(bool(pm(b'd/e/f/b.txt')), True)
- self.assertEqual(m.visitdir(b'.'), True)
+ self.assertEqual(m.visitdir(b''), True)
self.assertEqual(m.visitdir(b'e'), True)
self.assertEqual(m.visitdir(b'e/f'), True)
self.assertEqual(m.visitdir(b'e/f/g'), False)
- self.assertEqual(pm.visitdir(b'.'), True)
+ self.assertEqual(pm.visitdir(b''), True)
self.assertEqual(pm.visitdir(b'd'), True)
self.assertEqual(pm.visitdir(b'd/e'), True)
self.assertEqual(pm.visitdir(b'd/e/f'), True)
@@ -814,7 +814,7 @@
# OPT: visitchildrenset could possibly return {'e'} and {'f'} for these
# next two, respectively; patternmatcher does not have this
# optimization.
- self.assertEqual(m.visitchildrenset(b'.'), b'this')
+ self.assertEqual(m.visitchildrenset(b''), b'this')
self.assertEqual(m.visitchildrenset(b'e'), b'this')
self.assertEqual(m.visitchildrenset(b'e/f'), b'this')
self.assertEqual(m.visitchildrenset(b'e/f/g'), set())
@@ -822,7 +822,7 @@
# OPT: visitchildrenset could possibly return {'d'}, {'e'}, and {'f'}
# for these next three, respectively; patternmatcher does not have this
# optimization.
- self.assertEqual(pm.visitchildrenset(b'.'), b'this')
+ self.assertEqual(pm.visitchildrenset(b''), b'this')
self.assertEqual(pm.visitchildrenset(b'd'), b'this')
self.assertEqual(pm.visitchildrenset(b'd/e'), b'this')
self.assertEqual(pm.visitchildrenset(b'd/e/f'), b'this')
--- a/tests/test-merge-changedelete.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-merge-changedelete.t Mon Jul 22 14:00:33 2019 -0400
@@ -55,11 +55,11 @@
$ hg merge -y
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? u
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
file 'file2' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
merging file3
warning: conflicts while merging file3! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 3 files unresolved
@@ -124,11 +124,11 @@
> d
> EOF
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? c
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? c
file 'file2' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? d
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? d
merging file3
warning: conflicts while merging file3! (edit, then use 'hg resolve --mark')
0 files updated, 2 files merged, 0 files removed, 1 files unresolved
@@ -194,23 +194,23 @@
> c
> EOF
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? foo
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? foo
unrecognized response
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? bar
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? bar
unrecognized response
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? d
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? d
file 'file2' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? baz
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? baz
unrecognized response
file 'file2' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? c
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? c
merging file3
warning: conflicts while merging file3! (edit, then use 'hg resolve --mark')
0 files updated, 1 files merged, 1 files removed, 1 files unresolved
@@ -272,11 +272,11 @@
> d
> EOF
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? d
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? d
file 'file2' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved?
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do?
merging file3
warning: conflicts while merging file3! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 1 files removed, 2 files unresolved
@@ -485,12 +485,14 @@
$ hg merge --config ui.interactive=True --tool :prompt
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved?
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do?
file 'file2' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved?
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for file3?
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do?
+ file 'file3' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do?
0 files updated, 0 files merged, 0 files removed, 3 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
@@ -546,12 +548,14 @@
$ hg merge --tool :prompt
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? u
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
file 'file2' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for file3? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
+ file 'file3' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do? u
0 files updated, 0 files merged, 0 files removed, 3 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
@@ -605,11 +609,11 @@
$ hg merge --tool :merge3
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? u
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
file 'file2' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
merging file3
warning: conflicts while merging file3! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 3 files unresolved
@@ -697,12 +701,14 @@
=== :other -> :prompt ===
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved?
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do?
file 'file2' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved?
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for file3?
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do?
+ file 'file3' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do?
--- diff of status ---
(status identical)
@@ -727,12 +733,14 @@
=== :local -> :prompt ===
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved?
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do?
file 'file2' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved?
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for file3?
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do?
+ file 'file3' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do?
--- diff of status ---
(status identical)
@@ -747,12 +755,14 @@
=== :fail -> :prompt ===
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved?
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do?
file 'file2' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved?
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for file3?
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do?
+ file 'file3' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do?
--- diff of status ---
(status identical)
@@ -775,11 +785,11 @@
$ hg rm file2
$ hg update 1 -y
file 'file1' was deleted in other [destination] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? u
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
file 'file2' was deleted in local [working copy] but was modified in other [destination].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
1 files updated, 0 files merged, 0 files removed, 2 files unresolved
use 'hg resolve' to retry unresolved file merges
[1]
@@ -953,11 +963,11 @@
$ hg rm file2
$ hg update 1 --config ui.interactive=True --tool :prompt
file 'file1' was deleted in other [destination] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved?
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do?
file 'file2' was deleted in local [working copy] but was modified in other [destination].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved?
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do?
1 files updated, 0 files merged, 0 files removed, 2 files unresolved
use 'hg resolve' to retry unresolved file merges
[1]
@@ -1005,11 +1015,11 @@
$ hg rm file2
$ hg update 1 --tool :merge3
file 'file1' was deleted in other [destination] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? u
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
file 'file2' was deleted in local [working copy] but was modified in other [destination].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
1 files updated, 0 files merged, 0 files removed, 2 files unresolved
use 'hg resolve' to retry unresolved file merges
[1]
@@ -1063,11 +1073,11 @@
=== :other -> :prompt ===
file 'file1' was deleted in other [destination] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved?
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do?
file 'file2' was deleted in local [working copy] but was modified in other [destination].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved?
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do?
--- diff of status ---
(status identical)
@@ -1092,11 +1102,11 @@
=== :local -> :prompt ===
file 'file1' was deleted in other [destination] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved?
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do?
file 'file2' was deleted in local [working copy] but was modified in other [destination].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved?
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do?
--- diff of status ---
(status identical)
@@ -1111,11 +1121,11 @@
=== :fail -> :prompt ===
file 'file1' was deleted in other [destination] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved?
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do?
file 'file2' was deleted in local [working copy] but was modified in other [destination].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved?
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do?
--- diff of status ---
(status identical)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-merge-combination.t Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,220 @@
+This file shows what hg says are "modified" files for a merge commit
+(hg log -T {files}), somewhat exhaustively.
+It shows merges that involve file contents changing, and merges that
+involve the executable bit changing, but not merges with multiple or zero
+merge ancestors, nor copies/renames, nor identical file contents
+with different filelog revisions.
+
+genmerges is the workhorse. Given:
+- a range function describing the possible values for file a
+- an isgood function to filter out uninteresting combinations
+- a createfile function to actually write the values for file a on the
+filesystem
+it prints a series of lines that look like: abcd C: output of -T {files}
+describing the file a at the base, p2, p1, and merge
+revisions, respectively. "C" indicates that hg merge had conflicts.
+ $ genmerges () {
+ > for base in `range` -; do
+ > for r1 in `range $base` -; do
+ > for r2 in `range $base $r1` -; do
+ > for m in `range $base $r1 $r2` -; do
+ > line="$base$r1$r2$m"
+ > isgood $line || continue
+ > hg init repo
+ > cd repo
+ > make_commit () {
+ > v=$1; msg=$2; file=$3;
+ > if [ $v != - ]; then
+ > createfile $v
+ > else
+ > if [ -f a ]
+ > then rm a
+ > else touch $file
+ > fi
+ > fi
+ > hg commit -q -Am $msg || exit 123
+ > }
+ > echo foo > foo
+ > make_commit $base base b
+ > make_commit $r1 r1 c
+ > hg up -r 0 -q
+ > make_commit $r2 r2 d
+ > hg merge -q -r 1 > ../output 2>&1
+ > if [ $? -ne 0 ]; then rm -f *.orig; hg resolve -m --all -q; fi
+ > if [ -s ../output ]; then conflicts=" C"; else conflicts=" "; fi
+ > make_commit $m m e
+ > if [ $m = $r1 ] && [ $m = $r2 ]
+ > then expected=
+ > elif [ $m = $r1 ]
+ > then if [ $base = $r2 ]
+ > then expected=
+ > else expected=a
+ > fi
+ > elif [ $m = $r2 ]
+ > then if [ $base = $r1 ]
+ > then expected=
+ > else expected=a
+ > fi
+ > else expected=a
+ > fi
+ > got=`hg log -r 3 --template '{files}\n' | tr --delete 'e '`
+ > if [ "$got" = "$expected" ]
+ > then echo "$line$conflicts: agree on \"$got\""
+ > else echo "$line$conflicts: hg said \"$got\", expected \"$expected\""
+ > fi
+ > cd ../
+ > rm -rf repo
+ > done
+ > done
+ > done
+ > done
+ > }
+
+All the merges of various file contents.
+
+ $ range () {
+ > max=0
+ > for i in $@; do
+ > if [ $i = - ]; then continue; fi
+ > if [ $i -gt $max ]; then max=$i; fi
+ > done
+ > $TESTDIR/seq.py `expr $max + 1`
+ > }
+ $ isgood () { true; }
+ $ createfile () {
+ > if [ -f a ] && [ "`cat a`" = $1 ]
+ > then touch $file
+ > else echo $v > a
+ > fi
+ > }
+
+ $ genmerges
+ 1111 : agree on ""
+ 1112 : agree on "a"
+ 111- : agree on "a"
+ 1121 : agree on "a"
+ 1122 : agree on ""
+ 1123 : agree on "a"
+ 112- : agree on "a"
+ 11-1 : hg said "", expected "a"
+ 11-2 : agree on "a"
+ 11-- : agree on ""
+ 1211 : agree on "a"
+ 1212 : agree on ""
+ 1213 : agree on "a"
+ 121- : agree on "a"
+ 1221 : agree on "a"
+ 1222 : agree on ""
+ 1223 : agree on "a"
+ 122- : agree on "a"
+ 1231 C: agree on "a"
+ 1232 C: agree on "a"
+ 1233 C: agree on "a"
+ 1234 C: agree on "a"
+ 123- C: agree on "a"
+ 12-1 C: agree on "a"
+ 12-2 C: hg said "", expected "a"
+ 12-3 C: agree on "a"
+ 12-- C: agree on "a"
+ 1-11 : hg said "", expected "a"
+ 1-12 : agree on "a"
+ 1-1- : agree on ""
+ 1-21 C: agree on "a"
+ 1-22 C: hg said "", expected "a"
+ 1-23 C: agree on "a"
+ 1-2- C: agree on "a"
+ 1--1 : agree on "a"
+ 1--2 : agree on "a"
+ 1--- : agree on ""
+ -111 : agree on ""
+ -112 : agree on "a"
+ -11- : agree on "a"
+ -121 C: agree on "a"
+ -122 C: agree on "a"
+ -123 C: agree on "a"
+ -12- C: agree on "a"
+ -1-1 : agree on ""
+ -1-2 : agree on "a"
+ -1-- : agree on "a"
+ --11 : agree on ""
+ --12 : agree on "a"
+ --1- : agree on "a"
+ ---1 : agree on "a"
+ ---- : agree on ""
+
+All the merges of executable bit.
+
+ $ range () {
+ > max=a
+ > for i in $@; do
+ > if [ $i = - ]; then continue; fi
+ > if [ $i > $max ]; then max=$i; fi
+ > done
+ > if [ $max = a ]; then echo f; else echo f x; fi
+ > }
+ $ isgood () { case $line in *f*x*) true;; *) false;; esac; }
+ $ createfile () {
+ > if [ -f a ] && (([ -x a ] && [ $v = x ]) || (! [ -x a ] && [ $v != x ]))
+ > then touch $file
+ > else touch a; if [ $v = x ]; then chmod +x a; else chmod -x a; fi
+ > fi
+ > }
+
+#if execbit
+ $ genmerges
+ fffx : agree on "a"
+ ffxf : agree on "a"
+ ffxx : agree on ""
+ ffx- : agree on "a"
+ ff-x : hg said "", expected "a"
+ fxff : hg said "", expected "a"
+ fxfx : hg said "a", expected ""
+ fxf- : agree on "a"
+ fxxf : agree on "a"
+ fxxx : agree on ""
+ fxx- : agree on "a"
+ fx-f : hg said "", expected "a"
+ fx-x : hg said "", expected "a"
+ fx-- : hg said "", expected "a"
+ f-fx : agree on "a"
+ f-xf : agree on "a"
+ f-xx : hg said "", expected "a"
+ f-x- : agree on "a"
+ f--x : agree on "a"
+ -ffx : agree on "a"
+ -fxf C: agree on "a"
+ -fxx C: hg said "", expected "a"
+ -fx- C: agree on "a"
+ -f-x : hg said "", expected "a"
+ --fx : agree on "a"
+#endif
+
+Files modified or cleanly merged, with no greatest common ancestors:
+
+ $ hg init repo; cd repo
+ $ touch a0 b0; hg commit -qAm 0
+ $ hg up -qr null; touch a1 b1; hg commit -qAm 1
+ $ hg merge -qr 0; rm b*; hg commit -qAm 2
+ $ hg log -r . -T '{files}\n'
+ b0 b1
+ $ cd ../
+ $ rm -rf repo
+
+A few cases of criss-cross merges involving deletions (listing all
+such merges is probably too much). Both gcas contain $files, so we
+expect the final merge to behave like a merge with a single gca
+containing $files.
+
+ $ hg init repo; cd repo
+ $ files="c1 u1 c2 u2"
+ $ touch $files; hg commit -qAm '0 root'
+ $ for f in $files; do echo f > $f; done; hg commit -qAm '1 gca1'
+ $ hg up -qr0; hg revert -qr 1 --all; hg commit -qAm '2 gca2'
+ $ hg up -qr 1; hg merge -qr 2; rm *1; hg commit -qAm '3 p1'
+ $ hg up -qr 2; hg merge -qr 1; rm *2; hg commit -qAm '4 p2'
+ $ hg merge -qr 3; echo f > u1; echo f > u2; rm -f c1 c2
+ $ hg commit -qAm '5 merge with two gcas'
+ $ hg log -r . -T '{files}\n' # expecting u1 u2
+
+ $ cd ../
+ $ rm -rf repo
--- a/tests/test-merge-commit.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-merge-commit.t Mon Jul 22 14:00:33 2019 -0400
@@ -67,7 +67,6 @@
This should use bar@rev2 as the ancestor:
$ hg --debug merge 3
- searching for copies back to rev 1
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: 0f2ff26688b9, local: 2263c1be0967+, remote: 0555950ead28
@@ -155,7 +154,6 @@
This should use bar@rev2 as the ancestor:
$ hg --debug merge 3
- searching for copies back to rev 1
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: 0f2ff26688b9, local: 2263c1be0967+, remote: 3ffa6b9e35f0
--- a/tests/test-merge-criss-cross.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-merge-criss-cross.t Mon Jul 22 14:00:33 2019 -0400
@@ -11,15 +11,9 @@
$ hg up -qr0
$ echo '2 first change' > f2
- $ mkdir d1
- $ echo '0 base' > d1/f3
- $ echo '0 base' > d1/f4
- $ hg add -q d1
$ hg ci -qm '2 first change f2'
$ hg merge -qr 1
- $ hg rm d1/f3
- $ hg mv -q d1 d2
$ hg ci -m '3 merge'
$ hg up -qr2
@@ -30,38 +24,38 @@
$ hg ci -m '5 second change f1'
$ hg up -r3
- 2 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ echo '6 second change' > f2
$ hg ci -m '6 second change f2'
$ hg log -G
- @ changeset: 6:6373bbfdae1d
+ @ changeset: 6:3b08d01b0ab5
| tag: tip
- | parent: 3:c202c8af058d
+ | parent: 3:cf89f02107e5
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: 6 second change f2
|
- | o changeset: 5:e673248094b1
+ | o changeset: 5:adfe50279922
| | user: test
| | date: Thu Jan 01 00:00:00 1970 +0000
| | summary: 5 second change f1
| |
- | o changeset: 4:177f58377c06
- | |\ parent: 2:d1d156401c1b
+ | o changeset: 4:7d3e55501ae6
+ | |\ parent: 2:40663881a6dd
| | | parent: 1:0f6b37dbe527
| | | user: test
| | | date: Thu Jan 01 00:00:00 1970 +0000
| | | summary: 4 merge
| | |
- o---+ changeset: 3:c202c8af058d
- | | | parent: 2:d1d156401c1b
+ o---+ changeset: 3:cf89f02107e5
+ | | | parent: 2:40663881a6dd
|/ / parent: 1:0f6b37dbe527
| | user: test
| | date: Thu Jan 01 00:00:00 1970 +0000
| | summary: 3 merge
| |
- | o changeset: 2:d1d156401c1b
+ | o changeset: 2:40663881a6dd
| | parent: 0:40494bf2444c
| | user: test
| | date: Thu Jan 01 00:00:00 1970 +0000
@@ -79,51 +73,26 @@
$ hg merge -v --debug --tool internal:dump 5 --config merge.preferancestor='!'
- note: using 0f6b37dbe527 as ancestor of 6373bbfdae1d and e673248094b1
- alternatively, use --config merge.preferancestor=d1d156401c1b
- searching for copies back to rev 3
- unmatched files in local:
- d2/f4
- unmatched files in other:
- d1/f3
- d1/f4
- all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'd1/f4' -> dst: 'd2/f4'
- checking for directory renames
- discovered dir src: 'd1/' -> dst: 'd2/'
- pending file src: 'd1/f3' -> dst: 'd2/f3'
- pending file src: 'd1/f4' -> dst: 'd2/f4'
+ note: using 0f6b37dbe527 as ancestor of 3b08d01b0ab5 and adfe50279922
+ alternatively, use --config merge.preferancestor=40663881a6dd
resolving manifests
branchmerge: True, force: False, partial: False
- ancestor: 0f6b37dbe527, local: 6373bbfdae1d+, remote: e673248094b1
- preserving d2/f4 for resolve of d2/f4
+ ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922
preserving f2 for resolve of f2
f1: remote is newer -> g
getting f1
- d2/f3: local directory rename - get from d1/f3 -> dg
- getting d1/f3 to d2/f3
- d2/f4: local directory rename, both created -> m (premerge)
f2: versions differ -> m (premerge)
picked tool ':dump' for f2 (binary False symlink False changedelete False)
merging f2
- my f2@6373bbfdae1d+ other f2@e673248094b1 ancestor f2@0f6b37dbe527
+ my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@0f6b37dbe527
f2: versions differ -> m (merge)
picked tool ':dump' for f2 (binary False symlink False changedelete False)
- my f2@6373bbfdae1d+ other f2@e673248094b1 ancestor f2@0f6b37dbe527
- 3 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@0f6b37dbe527
+ 1 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
- $ f --dump --recurse *
- d2: directory with 2 files
- d2/f3:
- >>>
- 0 base
- <<<
- d2/f4:
- >>>
- 0 base
- <<<
+ $ f --dump *
f1:
>>>
5 second change
@@ -151,13 +120,11 @@
$ hg up -qC .
$ hg merge -v --tool internal:dump 5 --config merge.preferancestor="null 40663881 3b08d"
- note: using 0f6b37dbe527 as ancestor of 6373bbfdae1d and e673248094b1
- alternatively, use --config merge.preferancestor=d1d156401c1b
+ note: using 40663881a6dd as ancestor of 3b08d01b0ab5 and adfe50279922
+ alternatively, use --config merge.preferancestor=0f6b37dbe527
resolving manifests
- getting f1
- getting d1/f3 to d2/f3
- merging f2
- 3 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ merging f1
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
@@ -166,70 +133,34 @@
$ rm f*
$ hg up -qC .
$ hg merge -v --debug --tool internal:dump 5 --config merge.preferancestor="*"
- note: merging 6373bbfdae1d+ and e673248094b1 using bids from ancestors 0f6b37dbe527 and d1d156401c1b
+ note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
calculating bids for ancestor 0f6b37dbe527
- searching for copies back to rev 3
- unmatched files in local:
- d2/f4
- unmatched files in other:
- d1/f3
- d1/f4
- all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'd1/f4' -> dst: 'd2/f4'
- checking for directory renames
- discovered dir src: 'd1/' -> dst: 'd2/'
- pending file src: 'd1/f3' -> dst: 'd2/f3'
- pending file src: 'd1/f4' -> dst: 'd2/f4'
resolving manifests
branchmerge: True, force: False, partial: False
- ancestor: 0f6b37dbe527, local: 6373bbfdae1d+, remote: e673248094b1
- d2/f3: local directory rename - get from d1/f3 -> dg
- d2/f4: local directory rename, both created -> m
+ ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922
f1: remote is newer -> g
f2: versions differ -> m
- calculating bids for ancestor d1d156401c1b
- searching for copies back to rev 3
- unmatched files in local:
- d2/f4
- all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'd1/f4' -> dst: 'd2/f4'
- checking for directory renames
- discovered dir src: 'd1/' -> dst: 'd2/'
+ calculating bids for ancestor 40663881a6dd
resolving manifests
branchmerge: True, force: False, partial: False
- ancestor: d1d156401c1b, local: 6373bbfdae1d+, remote: e673248094b1
+ ancestor: 40663881a6dd, local: 3b08d01b0ab5+, remote: adfe50279922
f1: versions differ -> m
f2: remote unchanged -> k
auction for merging merge bids
- d2/f3: consensus for dg
- d2/f4: consensus for m
f1: picking 'get' action
f2: picking 'keep' action
end of auction
- preserving d2/f4 for resolve of d2/f4
f1: remote is newer -> g
getting f1
f2: remote unchanged -> k
- d2/f3: local directory rename - get from d1/f3 -> dg
- getting d1/f3 to d2/f3
- d2/f4: local directory rename, both created -> m (premerge)
- 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
- $ f --dump --recurse *
- d2: directory with 2 files
- d2/f3:
- >>>
- 0 base
- <<<
- d2/f4:
- >>>
- 0 base
- <<<
+ $ f --dump *
f1:
>>>
5 second change
@@ -243,79 +174,36 @@
The other way around:
$ hg up -C -r5
- 4 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg merge -v --debug --config merge.preferancestor="*"
- note: merging e673248094b1+ and 6373bbfdae1d using bids from ancestors 0f6b37dbe527 and d1d156401c1b
+ note: merging adfe50279922+ and 3b08d01b0ab5 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
calculating bids for ancestor 0f6b37dbe527
- searching for copies back to rev 3
- unmatched files in local:
- d1/f3
- d1/f4
- unmatched files in other:
- d2/f4
- all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'd1/f4' -> dst: 'd2/f4'
- checking for directory renames
- discovered dir src: 'd1/' -> dst: 'd2/'
- pending file src: 'd1/f3' -> dst: 'd2/f3'
- pending file src: 'd1/f4' -> dst: 'd2/f4'
resolving manifests
branchmerge: True, force: False, partial: False
- ancestor: 0f6b37dbe527, local: e673248094b1+, remote: 6373bbfdae1d
- d2/f3: remote directory rename - move from d1/f3 -> dm
- d2/f4: remote directory rename, both created -> m
+ ancestor: 0f6b37dbe527, local: adfe50279922+, remote: 3b08d01b0ab5
f1: remote unchanged -> k
f2: versions differ -> m
- calculating bids for ancestor d1d156401c1b
- searching for copies back to rev 3
- unmatched files in other:
- d2/f4
- all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'd1/f4' -> dst: 'd2/f4'
- checking for directory renames
- discovered dir src: 'd1/' -> dst: 'd2/'
+ calculating bids for ancestor 40663881a6dd
resolving manifests
branchmerge: True, force: False, partial: False
- ancestor: d1d156401c1b, local: e673248094b1+, remote: 6373bbfdae1d
- d1/f3: other deleted -> r
- d1/f4: other deleted -> r
- d2/f4: remote created -> g
+ ancestor: 40663881a6dd, local: adfe50279922+, remote: 3b08d01b0ab5
f1: versions differ -> m
f2: remote is newer -> g
auction for merging merge bids
- d1/f3: consensus for r
- d1/f4: consensus for r
- d2/f3: consensus for dm
- d2/f4: picking 'get' action
f1: picking 'keep' action
f2: picking 'get' action
end of auction
- d1/f3: other deleted -> r
- removing d1/f3
- d1/f4: other deleted -> r
- removing d1/f4
- d2/f4: remote created -> g
- getting d2/f4
f2: remote is newer -> g
getting f2
f1: remote unchanged -> k
- 2 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
- $ f --dump --recurse *
- d2: directory with 2 files
- d2/f3:
- >>>
- 0 base
- <<<
- d2/f4:
- >>>
- 0 base
- <<<
+ $ f --dump *
f1:
>>>
5 second change
@@ -329,85 +217,55 @@
$ hg up -qC
$ hg merge
- 2 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
$ hg up -qC tip
$ hg merge -v
- note: merging 6373bbfdae1d+ and e673248094b1 using bids from ancestors 0f6b37dbe527 and d1d156401c1b
+ note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
calculating bids for ancestor 0f6b37dbe527
resolving manifests
- calculating bids for ancestor d1d156401c1b
+ calculating bids for ancestor 40663881a6dd
resolving manifests
auction for merging merge bids
- d2/f3: consensus for dg
- d2/f4: consensus for m
f1: picking 'get' action
f2: picking 'keep' action
end of auction
getting f1
- getting d1/f3 to d2/f3
- 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
$ hg up -qC
$ hg merge -v --debug --config merge.preferancestor="*"
- note: merging 6373bbfdae1d+ and e673248094b1 using bids from ancestors 0f6b37dbe527 and d1d156401c1b
+ note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
calculating bids for ancestor 0f6b37dbe527
- searching for copies back to rev 3
- unmatched files in local:
- d2/f4
- unmatched files in other:
- d1/f3
- d1/f4
- all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'd1/f4' -> dst: 'd2/f4'
- checking for directory renames
- discovered dir src: 'd1/' -> dst: 'd2/'
- pending file src: 'd1/f3' -> dst: 'd2/f3'
- pending file src: 'd1/f4' -> dst: 'd2/f4'
resolving manifests
branchmerge: True, force: False, partial: False
- ancestor: 0f6b37dbe527, local: 6373bbfdae1d+, remote: e673248094b1
- d2/f3: local directory rename - get from d1/f3 -> dg
- d2/f4: local directory rename, both created -> m
+ ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922
f1: remote is newer -> g
f2: versions differ -> m
- calculating bids for ancestor d1d156401c1b
- searching for copies back to rev 3
- unmatched files in local:
- d2/f4
- all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'd1/f4' -> dst: 'd2/f4'
- checking for directory renames
- discovered dir src: 'd1/' -> dst: 'd2/'
+ calculating bids for ancestor 40663881a6dd
resolving manifests
branchmerge: True, force: False, partial: False
- ancestor: d1d156401c1b, local: 6373bbfdae1d+, remote: e673248094b1
+ ancestor: 40663881a6dd, local: 3b08d01b0ab5+, remote: adfe50279922
f1: versions differ -> m
f2: remote unchanged -> k
auction for merging merge bids
- d2/f3: consensus for dg
- d2/f4: consensus for m
f1: picking 'get' action
f2: picking 'keep' action
end of auction
- preserving d2/f4 for resolve of d2/f4
f1: remote is newer -> g
getting f1
f2: remote unchanged -> k
- d2/f3: local directory rename - get from d1/f3 -> dg
- getting d1/f3 to d2/f3
- d2/f4: local directory rename, both created -> m (premerge)
- 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
Test the greatest common ancestor returning multiple changesets
@@ -418,7 +276,7 @@
date: Thu Jan 01 00:00:00 1970 +0000
summary: 1 first change f1
- changeset: 2:d1d156401c1b
+ changeset: 2:40663881a6dd
parent: 0:40494bf2444c
user: test
date: Thu Jan 01 00:00:00 1970 +0000
@@ -513,3 +371,80 @@
2
$ cd ..
+
+ $ hg init issue5020
+ $ cd issue5020
+
+ $ echo a > noop
+ $ hg ci -qAm initial
+
+ $ echo b > noop
+ $ hg ci -qAm 'uninteresting change'
+
+ $ hg up -q 0
+ $ mkdir d1
+ $ echo a > d1/a
+ $ echo b > d1/b
+ $ hg ci -qAm 'add d1/a and d1/b'
+
+ $ hg merge -q 1
+ $ hg rm d1/a
+ $ hg mv -q d1 d2
+ $ hg ci -qm 'merge while removing d1/a and moving d1/b to d2/b'
+
+ $ hg up -q 1
+ $ hg merge -q 2
+ $ hg ci -qm 'merge (no changes while merging)'
+ $ hg log -G -T '{rev}:{node|short} {desc}'
+ @ 4:c0ef19750a22 merge (no changes while merging)
+ |\
+ +---o 3:6ca01f7342b9 merge while removing d1/a and moving d1/b to d2/b
+ | |/
+ | o 2:154e6000f54e add d1/a and d1/b
+ | |
+ o | 1:11b5b303e36c uninteresting change
+ |/
+ o 0:7b54db1ebf33 initial
+
+ $ hg merge 3 --debug
+ note: merging c0ef19750a22+ and 6ca01f7342b9 using bids from ancestors 11b5b303e36c and 154e6000f54e
+
+ calculating bids for ancestor 11b5b303e36c
+ unmatched files in local:
+ d1/a
+ d1/b
+ unmatched files in other:
+ d2/b
+ resolving manifests
+ branchmerge: True, force: False, partial: False
+ ancestor: 11b5b303e36c, local: c0ef19750a22+, remote: 6ca01f7342b9
+ d2/b: remote created -> g
+
+ calculating bids for ancestor 154e6000f54e
+ unmatched files in other:
+ d2/b
+ all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+ src: 'd1/b' -> dst: 'd2/b'
+ checking for directory renames
+ discovered dir src: 'd1/' -> dst: 'd2/'
+ resolving manifests
+ branchmerge: True, force: False, partial: False
+ ancestor: 154e6000f54e, local: c0ef19750a22+, remote: 6ca01f7342b9
+ d1/a: other deleted -> r
+ d1/b: other deleted -> r
+ d2/b: remote created -> g
+
+ auction for merging merge bids
+ d1/a: consensus for r
+ d1/b: consensus for r
+ d2/b: consensus for g
+ end of auction
+
+ d1/a: other deleted -> r
+ removing d1/a
+ d1/b: other deleted -> r
+ removing d1/b
+ d2/b: remote created -> g
+ getting d2/b
+ 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
--- a/tests/test-merge-force.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-merge-force.t Mon Jul 22 14:00:33 2019 -0400
@@ -143,80 +143,80 @@
$ hg merge -f --tool internal:merge3 'desc("remote")' 2>&1 | tee $TESTTMP/merge-output-1
file 'content1_missing_content1_content4-tracked' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? u
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_missing_content3_content3-tracked' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? u
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_missing_content3_content4-tracked' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? u
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_missing_missing_content4-tracked' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? u
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content1_content1-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content1_content2-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content1_content4-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content1_missing-tracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content1_missing-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content2_content1-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content2_content2-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content2_content4-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content2_missing-tracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content2_missing-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content3_content1-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content3_content2-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content3_content3-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content3_content4-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content3_missing-tracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content3_missing-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_missing_content1-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_missing_content2-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_missing_content4-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_missing_missing-tracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_missing_missing-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
merging content1_content2_content1_content4-tracked
merging content1_content2_content2_content1-tracked
merging content1_content2_content2_content4-tracked
@@ -729,88 +729,88 @@
$ hg resolve --unmark --all
$ hg resolve --all --tool internal:merge3
file 'content1_content2_content1_content1-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content1_content2-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
merging content1_content2_content1_content4-tracked
file 'content1_content2_content1_content4-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content1_missing-tracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content1_missing-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
merging content1_content2_content2_content1-tracked
file 'content1_content2_content2_content1-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content2_content2-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
merging content1_content2_content2_content4-tracked
file 'content1_content2_content2_content4-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content2_missing-tracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content2_missing-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
merging content1_content2_content3_content1-tracked
file 'content1_content2_content3_content1-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content3_content2-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
merging content1_content2_content3_content3-tracked
file 'content1_content2_content3_content3-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
merging content1_content2_content3_content4-tracked
file 'content1_content2_content3_content4-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content3_missing-tracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_content3_missing-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
merging content1_content2_missing_content1-tracked
file 'content1_content2_missing_content1-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_missing_content2-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
merging content1_content2_missing_content4-tracked
file 'content1_content2_missing_content4-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_missing_missing-tracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_content2_missing_missing-untracked' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_missing_content1_content4-tracked' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? u
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_missing_content3_content3-tracked' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? u
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_missing_content3_content4-tracked' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? u
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
file 'content1_missing_missing_content4-tracked' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? u
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
merging missing_content2_content2_content4-tracked
merging missing_content2_content3_content3-tracked
merging missing_content2_content3_content4-tracked
--- a/tests/test-merge-no-file-change.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-merge-no-file-change.t Mon Jul 22 14:00:33 2019 -0400
@@ -137,7 +137,7 @@
$ hg ci --debug -m merge
committing files:
b
- reusing manifest form p1 (listed files actually unchanged)
+ reusing manifest from p1 (listed files actually unchanged)
committing changelog
updating the branch cache
committed changeset 3:c8d50407916ef8a5a97cb6e36ca9bc844a6ee13e
--- a/tests/test-merge-remove.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-merge-remove.t Mon Jul 22 14:00:33 2019 -0400
@@ -103,8 +103,8 @@
$ hg merge -f
file 'bar' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
merging foo1 and foo to foo1
0 files updated, 1 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
--- a/tests/test-merge-subrepos.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-merge-subrepos.t Mon Jul 22 14:00:33 2019 -0400
@@ -112,8 +112,8 @@
> d
> EOF
file 'b' was deleted in local [working copy] but was modified in other [destination].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? d
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? d
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
XXX: There's a difference between wdir() and '.', so there should be a status.
--- a/tests/test-merge-tools.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-merge-tools.t Mon Jul 22 14:00:33 2019 -0400
@@ -713,7 +713,9 @@
true.executable=cat
# hg update -C 1
$ hg merge -r 2 --config ui.merge=internal:prompt
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f? u
+ file 'f' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
@@ -737,7 +739,9 @@
$ hg merge -r 2 --config ui.merge=:prompt --config ui.interactive=True << EOF
> u
> EOF
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f? u
+ file 'f' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
@@ -759,7 +763,9 @@
true.executable=cat
# hg update -C 1
$ hg merge -r 2 --config ui.merge=internal:prompt --config ui.interactive=true
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f?
+ file 'f' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do?
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
@@ -772,7 +778,9 @@
# hg resolve --list
U f
$ hg resolve --all --config ui.merge=internal:prompt --config ui.interactive=true
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f?
+ file 'f' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do?
[1]
$ aftermerge
# cat f
@@ -785,7 +793,9 @@
U f
$ rm f
$ hg resolve --all --config ui.merge=internal:prompt --config ui.interactive=true
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f?
+ file 'f' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do?
[1]
$ aftermerge
# cat f
@@ -796,7 +806,9 @@
# hg resolve --list
U f
$ hg resolve --all --config ui.merge=internal:prompt
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f? u
+ file 'f' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do? u
[1]
$ aftermerge
# cat f
@@ -1908,7 +1920,9 @@
tool :other can't handle binary
tool false can't handle binary
no tool found to merge b
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for b? u
+ file 'b' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
--- a/tests/test-merge-types.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-merge-types.t Mon Jul 22 14:00:33 2019 -0400
@@ -30,7 +30,6 @@
Symlink is local parent, executable is other:
$ hg merge --debug
- searching for copies back to rev 1
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: c334dc3be0da, local: 521a1e40188f+, remote: 3574f3e69b1c
@@ -40,7 +39,9 @@
couldn't find merge tool hgmerge
no tool found to merge a
picked tool ':prompt' for a (binary False symlink True changedelete False)
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for a? u
+ file 'a' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
@@ -63,7 +64,6 @@
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg merge --debug --tool :union
- searching for copies back to rev 1
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: c334dc3be0da, local: 3574f3e69b1c+, remote: 521a1e40188f
@@ -86,7 +86,6 @@
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg merge --debug --tool :merge3
- searching for copies back to rev 1
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: c334dc3be0da, local: 3574f3e69b1c+, remote: 521a1e40188f
@@ -109,7 +108,6 @@
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg merge --debug --tool :merge-local
- searching for copies back to rev 1
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: c334dc3be0da, local: 3574f3e69b1c+, remote: 521a1e40188f
@@ -131,7 +129,6 @@
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg merge --debug --tool :merge-other
- searching for copies back to rev 1
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: c334dc3be0da, local: 3574f3e69b1c+, remote: 521a1e40188f
@@ -165,7 +162,6 @@
$ hg up -Cq 0
$ echo data > a
$ HGMERGE= hg up -y --debug --config ui.merge=
- searching for copies back to rev 2
resolving manifests
branchmerge: False, force: False, partial: False
ancestor: c334dc3be0da, local: c334dc3be0da+, remote: 521a1e40188f
@@ -174,7 +170,9 @@
(couldn't find merge tool hgmerge|tool hgmerge can't handle symlinks) (re)
no tool found to merge a
picked tool ':prompt' for a (binary False symlink True changedelete False)
- keep (l)ocal [working copy], take (o)ther [destination], or leave (u)nresolved for a? u
+ file 'a' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [destination], or leave (u)nresolved.
+ What do you want to do? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges
updated to "521a1e40188f: symlink"
@@ -209,7 +207,9 @@
$ hg merge
tool internal:merge (for pattern f) can't handle symlinks
no tool found to merge f
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f? u
+ file 'f' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
@@ -221,7 +221,9 @@
$ hg merge
tool internal:merge (for pattern f) can't handle symlinks
no tool found to merge f
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f? u
+ file 'f' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
@@ -248,7 +250,9 @@
$ hg merge
tool internal:merge (for pattern f) can't handle symlinks
no tool found to merge f
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f? u
+ file 'f' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
@@ -260,7 +264,9 @@
$ hg merge
tool internal:merge (for pattern f) can't handle symlinks
no tool found to merge f
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f? u
+ file 'f' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
@@ -343,13 +349,19 @@
warning: cannot merge flags for c without common ancestor - keeping local flags
tool internal:merge (for pattern d) can't handle symlinks
no tool found to merge d
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for d? u
+ file 'd' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do? u
tool internal:merge (for pattern f) can't handle symlinks
no tool found to merge f
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f? u
+ file 'f' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do? u
tool internal:merge (for pattern h) can't handle symlinks
no tool found to merge h
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for h? u
+ file 'h' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do? u
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
warning: conflicts while merging bx! (edit, then use 'hg resolve --mark')
@@ -405,13 +417,19 @@
warning: cannot merge flags for c without common ancestor - keeping local flags
tool internal:merge (for pattern d) can't handle symlinks
no tool found to merge d
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for d? u
+ file 'd' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do? u
tool internal:merge (for pattern f) can't handle symlinks
no tool found to merge f
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f? u
+ file 'f' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do? u
tool internal:merge (for pattern h) can't handle symlinks
no tool found to merge h
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for h? u
+ file 'h' needs to be resolved.
+ You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
+ What do you want to do? u
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
warning: conflicts while merging bx! (edit, then use 'hg resolve --mark')
--- a/tests/test-merge1.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-merge1.t Mon Jul 22 14:00:33 2019 -0400
@@ -44,6 +44,13 @@
commit: 1 unknown (interrupted update)
update: 1 new changesets (update)
phases: 2 draft
+Detect interrupted update by hg status --verbose
+ $ hg status -v
+ ? b/nonempty
+ # The repository is in an unfinished *update* state.
+
+ # To continue: hg update
+
$ rm b/nonempty
--- a/tests/test-merge7.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-merge7.t Mon Jul 22 14:00:33 2019 -0400
@@ -81,7 +81,6 @@
new changesets 40d11a4173a8
(run 'hg heads' to see heads, 'hg merge' to merge)
$ hg merge --debug
- searching for copies back to rev 1
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: 96b70246a118, local: 50c3a7e29886+, remote: 40d11a4173a8
--- a/tests/test-mq-qrefresh-interactive.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-mq-qrefresh-interactive.t Mon Jul 22 14:00:33 2019 -0400
@@ -190,25 +190,29 @@
> EOF
diff --git a/1.txt b/1.txt
2 hunks, 2 lines changed
- examine changes to '1.txt'? [Ynesfdaq?] y
+ examine changes to '1.txt'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,3 +1,3 @@
1
-2
+2 2
3
- record change 1/4 to '1.txt'? [Ynesfdaq?] y
+ record change 1/4 to '1.txt'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -3,3 +3,3 @@
3
-4
+4 4
5
- record change 2/4 to '1.txt'? [Ynesfdaq?] n
+ record change 2/4 to '1.txt'?
+ (enter ? for help) [Ynesfdaq?] n
diff --git a/2.txt b/2.txt
1 hunks, 1 lines changed
- examine changes to '2.txt'? [Ynesfdaq?] y
+ examine changes to '2.txt'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,5 +1,5 @@
a
@@ -217,11 +221,13 @@
c
d
e
- record change 3/4 to '2.txt'? [Ynesfdaq?] y
+ record change 3/4 to '2.txt'?
+ (enter ? for help) [Ynesfdaq?] y
diff --git a/dir/a.txt b/dir/a.txt
1 hunks, 1 lines changed
- examine changes to 'dir/a.txt'? [Ynesfdaq?] n
+ examine changes to 'dir/a.txt'?
+ (enter ? for help) [Ynesfdaq?] n
After partial qrefresh 'tip'
@@ -290,7 +296,8 @@
> EOF
diff --git a/1.txt b/1.txt
1 hunks, 1 lines changed
- examine changes to '1.txt'? [Ynesfdaq?] y
+ examine changes to '1.txt'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,5 +1,5 @@
1
@@ -299,11 +306,13 @@
-4
+4 4
5
- record change 1/2 to '1.txt'? [Ynesfdaq?] y
+ record change 1/2 to '1.txt'?
+ (enter ? for help) [Ynesfdaq?] y
diff --git a/dir/a.txt b/dir/a.txt
1 hunks, 1 lines changed
- examine changes to 'dir/a.txt'? [Ynesfdaq?] y
+ examine changes to 'dir/a.txt'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,4 +1,4 @@
-hello world
@@ -311,7 +320,8 @@
someone
up
- record change 2/2 to 'dir/a.txt'? [Ynesfdaq?] y
+ record change 2/2 to 'dir/a.txt'?
+ (enter ? for help) [Ynesfdaq?] y
After final qrefresh 'tip'
--- a/tests/test-mq-subrepo.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-mq-subrepo.t Mon Jul 22 14:00:33 2019 -0400
@@ -270,7 +270,8 @@
$ hg qpush
applying 1
subrepository sub diverged (local revision: b2fdb12cd82b, remote revision: aa037b301eba)
- (M)erge, keep (l)ocal or keep (r)emote? m
+ you can (m)erge, keep (l)ocal or keep (r)emote.
+ what do you want to do? m
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
now at: 1
$ hg status -AS
@@ -303,11 +304,13 @@
% qrecord --config ui.interactive=1 -m0 0.diff
diff --git a/.hgsub b/.hgsub
new file mode 100644
- examine changes to '.hgsub'? [Ynesfdaq?] y
+ examine changes to '.hgsub'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -0,0 +1,1 @@
+sub = sub
- record this change to '.hgsub'? [Ynesfdaq?] y
+ record this change to '.hgsub'?
+ (enter ? for help) [Ynesfdaq?] y
warning: subrepo spec file '.hgsub' not found
warning: subrepo spec file '.hgsub' not found
@@ -331,12 +334,14 @@
% qrecord --config ui.interactive=1 -m1 1.diff
diff --git a/.hgsub b/.hgsub
1 hunks, 1 lines changed
- examine changes to '.hgsub'? [Ynesfdaq?] y
+ examine changes to '.hgsub'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,1 +1,2 @@
sub = sub
+sub2 = sub2
- record this change to '.hgsub'? [Ynesfdaq?] y
+ record this change to '.hgsub'?
+ (enter ? for help) [Ynesfdaq?] y
path sub
source sub
@@ -358,7 +363,8 @@
% qrecord --config ui.interactive=1 -m2 2.diff
diff --git a/.hgsub b/.hgsub
deleted file mode 100644
- examine changes to '.hgsub'? [Ynesfdaq?] y
+ examine changes to '.hgsub'?
+ (enter ? for help) [Ynesfdaq?] y
% debugsub should be empty
@@ -374,7 +380,8 @@
% qrecord --config ui.interactive=1 -m3 3.diff
diff --git a/.hgsub b/.hgsub
deleted file mode 100644
- examine changes to '.hgsub'? [Ynesfdaq?] y
+ examine changes to '.hgsub'?
+ (enter ? for help) [Ynesfdaq?] y
% debugsub should be empty
--- a/tests/test-mq.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-mq.t Mon Jul 22 14:00:33 2019 -0400
@@ -811,7 +811,7 @@
$ echo y>y
$ hg add y
$ hg strip tip
- abort: local changes found
+ abort: uncommitted changes
[255]
--force strip with local changes
--- a/tests/test-mv-cp-st-diff.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-mv-cp-st-diff.t Mon Jul 22 14:00:33 2019 -0400
@@ -1624,6 +1624,13 @@
$ echo change > f
$ hg ci -m 'change f'
+Make a rename because we want to track renames. It is also important that the
+faulty linkrev is not only the "start" commit to ensure the linkrev will be
+used.
+
+ $ hg mv f renamed
+ $ hg ci -m renamed
+
Make a second branch, we use a named branch to create a simple commit
that does not touch f.
@@ -1631,31 +1638,28 @@
$ hg branch -q dev
$ hg ci -Aqm dev
-Graft the initial change, as f was untouched, we reuse the same entry and the
-linkrev point to the older branch.
+Graft the initial change and the rename. As f was untouched, we reuse the same
+entry and the linkrev point to the older branch.
$ hg graft -q 'desc(change)'
-
-Make a rename because we want to track renames. It is also important that the
-faulty linkrev is not the "start" commit to ensure the linkrev will be used.
-
- $ hg mv f renamed
- $ hg ci -m renamed
+ $ hg graft -q 'desc(renamed)'
$ hg log -G -T '{rev} {desc}'
- @ 4 renamed
+ @ 5 renamed
+ |
+ o 4 change f
|
- o 3 change f
+ o 3 dev
|
- o 2 dev
- |
+ | o 2 renamed
+ | |
| o 1 change f
|/
o 0 empty f
-The copy tracking should still reach rev 2 (branch creation).
-accessing the parent of 4 (renamed) should not jump use to revision 1.
+The copy tracking should still reach rev 3 (branch creation).
+accessing the parent of 5 (renamed) should not jump use to revision 1.
$ hg diff --git -r 'desc(dev)' -r .
diff --git a/f b/renamed
@@ -1669,11 +1673,11 @@
Check debug output for copy tracing
$ hg status --copies --rev 'desc(dev)' --rev . --config devel.debug.copies=yes --debug
- debug.copies: searching copies from a51f36ab1704 to 7935fd48a8f9
+ debug.copies: searching copies from a51f36ab1704 to 1f4aa1fd627b
debug.copies: search mode: forward
- debug.copies: looking into rename from a51f36ab1704 to 7935fd48a8f9
- debug.copies: search limit: 2
- debug.copies: missing file to search: 1
+ debug.copies: looking into rename from a51f36ab1704 to 1f4aa1fd627b
+ debug.copies: search limit: 3
+ debug.copies: missing files to search: 1
debug.copies: tracing file: renamed
debug.copies: rename of: f
debug.copies: time: * seconds (glob)
@@ -1681,4 +1685,11 @@
f
R f
+Check that merging across the rename works
+
+ $ echo modified >> renamed
+ $ hg co -m 4
+ merging renamed and f to f
+ 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+
$ cd ..
--- a/tests/test-narrow-patterns.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-narrow-patterns.t Mon Jul 22 14:00:33 2019 -0400
@@ -135,13 +135,11 @@
$ hg tracked --removeexclude dir1/dirA
comparing with ssh://user@dummy/master
searching for changes
- no changes found
saved backup bundle to $TESTTMP/narrow/.hg/strip-backup/*-widen.hg (glob)
adding changesets
adding manifests
adding file changes
added 9 changesets with 6 changes to 6 files
- new changesets *:* (glob)
$ hg tracked
I path:dir1
I path:dir2
@@ -195,13 +193,11 @@
deleting data/dir1/dirA/bar.i (reporevlogstore !)
deleting data/dir1/dirA/bar/0eca1d0cbdaea4651d1d04d71976a6d2d9bfaae5 (reposimplestore !)
deleting data/dir1/dirA/bar/index (reposimplestore !)
- no changes found
saved backup bundle to $TESTTMP/narrow/.hg/strip-backup/*-widen.hg (glob)
adding changesets
adding manifests
adding file changes
added 11 changesets with 7 changes to 7 files
- new changesets *:* (glob)
$ hg tracked
I path:dir1
I path:dir2
@@ -253,13 +249,11 @@
deleting data/dir1/dirA/foo.i (reporevlogstore !)
deleting data/dir1/dirA/foo/162caeb3d55dceb1fee793aa631ac8c73fcb8b5e (reposimplestore !)
deleting data/dir1/dirA/foo/index (reposimplestore !)
- no changes found
saved backup bundle to $TESTTMP/narrow/.hg/strip-backup/*-widen.hg (glob)
adding changesets
adding manifests
adding file changes
added 13 changesets with 8 changes to 8 files
- new changesets *:* (glob)
$ hg tracked
I path:dir1
I path:dir2
@@ -312,13 +306,11 @@
$ hg tracked --removeexclude dir1/dirA
comparing with ssh://user@dummy/master
searching for changes
- no changes found
saved backup bundle to $TESTTMP/narrow/.hg/strip-backup/*-widen.hg (glob)
adding changesets
adding manifests
adding file changes
added 13 changesets with 9 changes to 9 files
- new changesets *:* (glob)
$ hg tracked
I path:dir1
I path:dir2
@@ -389,13 +381,11 @@
$ hg tracked --addinclude dir1
comparing with ssh://user@dummy/master
searching for changes
- no changes found
saved backup bundle to $TESTTMP/narrow2/.hg/strip-backup/*-widen.hg (glob)
adding changesets
adding manifests
adding file changes
added 10 changesets with 6 changes to 6 files
- new changesets *:* (glob)
$ find * | sort
dir1
dir1/bar
--- a/tests/test-narrow-rebase.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-narrow-rebase.t Mon Jul 22 14:00:33 2019 -0400
@@ -1,3 +1,10 @@
+#testcases continuecommand continueflag
+#if continueflag
+ $ cat >> $HGRCPATH <<EOF
+ > [alias]
+ > continue = rebase --continue
+ > EOF
+#endif
$ . "$TESTDIR/narrow-library.sh"
@@ -69,7 +76,7 @@
$ echo modified3 > inside/f1
$ hg resolve -m 2>&1 | grep -v continue:
(no more unresolved files)
- $ hg rebase --continue
+ $ hg continue
rebasing 6:cdce97fbf653 "conflicting inside/f1" (tip)
saved backup bundle to $TESTTMP/narrow/.hg/strip-backup/*-rebase.hg (glob)
--- a/tests/test-narrow-share.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-narrow-share.t Mon Jul 22 14:00:33 2019 -0400
@@ -28,6 +28,9 @@
$ hg clone --narrow ssh://user@dummy/remote main -q \
> --include d1 --include d3 --include d5 --include d7
+Ignore file called "ignored"
+ $ echo ignored > main/.hgignore
+
$ hg share main share
updating working directory
4 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -55,15 +58,19 @@
# Make d3/f dirty
$ echo x >> main/d3/f
$ echo y >> main/d3/g
+ $ touch main/d3/ignored
+ $ touch main/d3/untracked
$ hg add main/d3/g
$ hg -R main st
M d3/f
A d3/g
+ ? d3/untracked
# Make d5/f not match the dirstate timestamp even though it's clean
$ sleep 2
$ hg -R main st
M d3/f
A d3/g
+ ? d3/untracked
$ hg -R main debugdirstate --no-dates
n 644 2 set d1/f
n 644 2 set d3/f
@@ -91,6 +98,8 @@
not deleting possibly dirty file d3/f
not deleting possibly dirty file d3/g
not deleting possibly dirty file d5/f
+ not deleting unknown file d3/untracked
+ not deleting ignored file d3/ignored
# d1/f, d3/f, d3/g and d5/f should no longer be reported
$ hg -R main files
main/d7/f
@@ -99,6 +108,8 @@
$ find main/d* -type f | sort
main/d3/f
main/d3/g
+ main/d3/ignored
+ main/d3/untracked
main/d5/f
main/d7/f
@@ -131,6 +142,8 @@
$ hg -R main st --all
M d3/f
? d3/g
+ ? d3/untracked
+ I d3/ignored
C d1/f
C d7/f
--- a/tests/test-narrow-trackedcmd.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-narrow-trackedcmd.t Mon Jul 22 14:00:33 2019 -0400
@@ -145,13 +145,11 @@
looking for local changes to affected paths
deleting data/inside/f.i
deleting meta/inside/00manifest.i (tree !)
- no changes found
saved backup bundle to $TESTTMP/narrow/.hg/strip-backup/*-widen.hg (glob)
adding changesets
adding manifests
adding file changes
added 2 changesets with 0 changes to 0 files
- new changesets *:* (glob)
$ hg tracked
I path:outisde
X path:inside
@@ -166,13 +164,11 @@
$ hg tracked --import-rules specs --addinclude 'wider/'
comparing with ssh://user@dummy/master
searching for changes
- no changes found
saved backup bundle to $TESTTMP/narrow/.hg/strip-backup/*-widen.hg (glob)
adding changesets
adding manifests
adding file changes
added 3 changesets with 1 changes to 1 files
- new changesets *:* (glob)
$ hg tracked
I path:outisde
I path:wider
@@ -211,13 +207,11 @@
$ hg tracked --import-rules ../nspecs
comparing with ssh://user@dummy/master
searching for changes
- no changes found
saved backup bundle to $TESTTMP/narrow/.hg/strip-backup/*-widen.hg (glob)
adding changesets
adding manifests
adding file changes
added 3 changesets with 0 changes to 0 files
- new changesets *:* (glob)
$ cd ..
--- a/tests/test-narrow-update.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-narrow-update.t Mon Jul 22 14:00:33 2019 -0400
@@ -72,5 +72,5 @@
$ hg mv inside/f1 inside/f2
$ hg update -q 'desc("modify outside")'
- $ hg update -q 'desc("initial")'
+ $ hg update -q 'desc("add inside and outside")'
$ hg update -q 'desc("modify inside")'
--- a/tests/test-narrow-widen.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-narrow-widen.t Mon Jul 22 14:00:33 2019 -0400
@@ -95,13 +95,11 @@
$ hg tracked --addinclude widest/f
comparing with ssh://user@dummy/master
searching for changes
- no changes found
saved backup bundle to $TESTTMP/narrow/.hg/strip-backup/*-widen.hg (glob)
adding changesets
adding manifests
adding file changes
added 3 changesets with 2 changes to 2 files
- new changesets *:* (glob)
$ hg tracked
I path:inside
I path:widest/f
@@ -154,13 +152,11 @@
$ hg tracked --addinclude wider
comparing with ssh://user@dummy/master
searching for changes
- no changes found
saved backup bundle to $TESTTMP/narrow/.hg/strip-backup/*-widen.hg (glob)
adding changesets
adding manifests
adding file changes
added 8 changesets with 7 changes to 3 files
- new changesets *:* (glob)
$ hg tracked
I path:inside
I path:wider
@@ -261,13 +257,11 @@
$ hg tracked --addinclude d1
comparing with ssh://user@dummy/upstream
searching for changes
- no changes found
saved backup bundle to $TESTTMP/narrow2/.hg/strip-backup/*-widen.hg (glob)
adding changesets
adding manifests
adding file changes
added 9 changesets with 5 changes to 5 files
- new changesets *:* (glob)
$ hg tracked
I path:d0
I path:d1
@@ -342,7 +336,6 @@
$ hg --config hooks.pretxnchangegroup.bad=false tracked --addinclude d1
comparing with ssh://user@dummy/upstream
searching for changes
- no changes found
saved backup bundle to $TESTTMP/interrupted/.hg/strip-backup/*-widen.hg (glob)
adding changesets
adding manifests
--- a/tests/test-narrow.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-narrow.t Mon Jul 22 14:00:33 2019 -0400
@@ -281,18 +281,20 @@
marked working directory as branch foo
(branches are permanent and global, did you want a bookmark?)
$ hg ci -m empty
+ $ hg log -T "{rev}: {desc} {outsidenarrow}\n"
+ 2: empty
+ 1: add d5/f outsidenarrow
+ 0: add d0/f outsidenarrow
$ hg pull -q
Can widen the empty clone
$ hg tracked --addinclude d0
comparing with ssh://user@dummy/master
searching for changes
- no changes found
saved backup bundle to $TESTTMP/narrow-empty/.hg/strip-backup/*-widen.hg (glob)
adding changesets
adding manifests
adding file changes
added 3 changesets with 1 changes to 1 files
- new changesets *:* (glob)
$ hg tracked
I path:d0
$ hg files
--- a/tests/test-origbackup-conflict.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-origbackup-conflict.t Mon Jul 22 14:00:33 2019 -0400
@@ -129,8 +129,9 @@
b/c: replacing untracked file
getting b/c
creating directory: $TESTTMP/repo/.hg/badorigbackups/b
- abort: $ENOTDIR$: *$TESTTMP/repo/.hg/badorigbackups/b* (glob)
- [255]
- $ cat .hg/badorigbackups
- data
-
+ removing conflicting file: $TESTTMP/repo/.hg/badorigbackups
+ getting d
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (activating bookmark c1)
+ $ ls .hg/badorigbackups/b
+ c
--- a/tests/test-phabricator.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-phabricator.t Mon Jul 22 14:00:33 2019 -0400
@@ -15,8 +15,8 @@
> hgphab.prefix = phab.mercurial-scm.org
> # When working on the extension and making phabricator interaction
> # changes, edit this to be a real phabricator token. When done, edit
- > # it back, and make sure to also edit your VCR transcripts to match
- > # whatever value you put here.
+ > # it back. The VCR transcripts will be auto-sanitised to replace your real
+ > # token with this value.
> hgphab.phabtoken = cli-hahayouwish
> EOF
$ VCR="$TESTDIR/phabricator"
@@ -32,6 +32,8 @@
Basic phabread:
$ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
# HG changeset patch
+ # Date 1536771503 0
+ # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
exchangev2: start to implement pull with wire protocol v2
Wire protocol version 2 will take a substantially different
@@ -39,8 +41,6 @@
is concerned).
This commit establishes a new exchangev2 module for holding
- code related to exchange using wire protocol v2. I could have
- added things to the existing exchange module. But it is already
phabupdate with an accept:
$ hg phabupdate --accept D4564 \
@@ -53,18 +53,18 @@
$ hg ci --addremove -m 'create alpha for phabricator test €'
adding alpha
$ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
- D6054 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
+ D1190 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
$ echo more >> alpha
$ HGEDITOR=true hg ci --amend
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/cb03845d6dd9-870f61a6-amend.hg
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/a86ed7d85e86-b7a54f3b-amend.hg
$ echo beta > beta
$ hg ci --addremove -m 'create beta for phabricator test'
adding beta
$ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
- D6054 - updated - 939d862f0318: create alpha for phabricator test \xe2\x82\xac (esc)
- D6055 - created - f55f947ed0f8: create beta for phabricator test
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f55f947ed0f8-0d1e502e-phabsend.hg
+ D1190 - updated - d940d39fb603: create alpha for phabricator test \xe2\x82\xac (esc)
+ D1191 - created - 4b2486dfc8c7: create beta for phabricator test
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/4b2486dfc8c7-d90584fa-phabsend.hg
$ unset HGENCODING
The amend won't explode after posting a public commit. The local tag is left
@@ -76,13 +76,13 @@
$ echo 'draft change' > alpha
$ hg ci -m 'create draft change for phabricator testing'
$ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
- D5544 - created - a56e5ebd77e6: create public change for phabricator testing
- D5545 - created - 6a0ade3e3ec2: create draft change for phabricator testing
- warning: not updating public commit 2:a56e5ebd77e6
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/6a0ade3e3ec2-aca7d23c-phabsend.hg
+ D1192 - created - 24ffd6bca53a: create public change for phabricator testing
+ D1193 - created - ac331633be79: create draft change for phabricator testing
+ warning: not updating public commit 2:24ffd6bca53a
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/ac331633be79-719b961c-phabsend.hg
$ hg tags -v
- tip 3:90532860b5e1
- D5544 2:a56e5ebd77e6 local
+ tip 3:a19f1434f9a5
+ D1192 2:24ffd6bca53a local
$ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
> {
@@ -107,15 +107,47 @@
Template keywords
$ hg log -T'{rev} {phabreview|json}\n'
- 3 {"id": "D5545", "url": "https://phab.mercurial-scm.org/D5545"}
- 2 {"id": "D5544", "url": "https://phab.mercurial-scm.org/D5544"}
- 1 {"id": "D6055", "url": "https://phab.mercurial-scm.org/D6055"}
- 0 {"id": "D6054", "url": "https://phab.mercurial-scm.org/D6054"}
+ 3 {"id": "D1193", "url": "https://phab.mercurial-scm.org/D1193"}
+ 2 {"id": "D1192", "url": "https://phab.mercurial-scm.org/D1192"}
+ 1 {"id": "D1191", "url": "https://phab.mercurial-scm.org/D1191"}
+ 0 {"id": "D1190", "url": "https://phab.mercurial-scm.org/D1190"}
$ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
- 3 https://phab.mercurial-scm.org/D5545 D5545
- 2 https://phab.mercurial-scm.org/D5544 D5544
- 1 https://phab.mercurial-scm.org/D6055 D6055
- 0 https://phab.mercurial-scm.org/D6054 D6054
+ 3 https://phab.mercurial-scm.org/D1193 D1193
+ 2 https://phab.mercurial-scm.org/D1192 D1192
+ 1 https://phab.mercurial-scm.org/D1191 D1191
+ 0 https://phab.mercurial-scm.org/D1190 D1190
+
+Commenting when phabsending:
+ $ echo comment > comment
+ $ hg ci --addremove -m "create comment for phabricator test"
+ adding comment
+ $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
+ D1253 - created - a7ee4bac036a: create comment for phabricator test
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/a7ee4bac036a-8009b5a0-phabsend.hg
+ $ echo comment2 >> comment
+ $ hg ci --amend
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/81fce7de1b7d-05339e5b-amend.hg
+ $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
+ D1253 - updated - 1acd4b60af38: create comment for phabricator test
+
+Phabreading a DREV with a local:commits time as a string:
+ $ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285
+ # HG changeset patch
+ # User test <test>
+ # Date 1562019844 0
+ # Branch default
+ # Node ID da5c8c6bf23a36b6e3af011bc3734460692c23ce
+ # Parent 1f634396406d03e565ed645370e5fecd062cf215
+ test string time
+
+ Differential Revision: https://phab.mercurial-scm.org/D1285
+ diff --git a/test b/test
+ new file mode 100644
+ --- /dev/null
+ +++ b/test
+ @@ * @@ (glob)
+ +test
+
$ cd ..
--- a/tests/test-push-warn.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-push-warn.t Mon Jul 22 14:00:33 2019 -0400
@@ -791,3 +791,57 @@
[255]
$ cd ..
+
+Test regarding pushing of closed branch/branches (Issue6080)
+
+ $ hg init x
+ $ cd x
+ $ hg -q branch a
+ $ echo 0 > foo
+ $ hg -q ci -Am 0
+ $ hg -q up 0
+ $ cd ..
+
+ $ hg -q clone x z
+ $ cd z
+
+When there is a single closed branch
+
+ $ hg -q branch foo
+ $ echo 0 > foo
+ $ hg -q ci -Am 0
+ $ hg ci --close-branch -m 'closing branch foo'
+ $ hg -q up 0
+ $ hg push ../x
+ pushing to ../x
+ searching for changes
+ abort: push creates new remote branches: foo (1 closed)!
+ (use 'hg push --new-branch' to create new remote branches)
+ [255]
+
+When there is more than one closed branch
+ $ hg -q branch bar
+ $ echo 0 > bar
+ $ hg -q ci -Am 0
+ $ hg ci --close-branch -m 'closing branch bar'
+ $ hg -q up 0
+ $ hg push ../x
+ pushing to ../x
+ searching for changes
+ abort: push creates new remote branches: bar, foo (2 closed)!
+ (use 'hg push --new-branch' to create new remote branches)
+ [255]
+
+When there is more than one new branch and not all are closed
+ $ hg -q branch bar1
+ $ echo 0 > bar1
+ $ hg -q ci -Am 0
+ $ hg -q up 0
+ $ hg push ../x
+ pushing to ../x
+ searching for changes
+ abort: push creates new remote branches: bar, bar1, foo (2 closed)!
+ (use 'hg push --new-branch' to create new remote branches)
+ [255]
+
+ $ cd ..
--- a/tests/test-qrecord.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-qrecord.t Mon Jul 22 14:00:33 2019 -0400
@@ -260,25 +260,29 @@
> EOF
diff --git a/1.txt b/1.txt
2 hunks, 2 lines changed
- examine changes to '1.txt'? [Ynesfdaq?] y
+ examine changes to '1.txt'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,3 +1,3 @@
1
-2
+2 2
3
- record change 1/4 to '1.txt'? [Ynesfdaq?] y
+ record change 1/4 to '1.txt'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -3,3 +3,3 @@
3
-4
+4 4
5
- record change 2/4 to '1.txt'? [Ynesfdaq?] n
+ record change 2/4 to '1.txt'?
+ (enter ? for help) [Ynesfdaq?] n
diff --git a/2.txt b/2.txt
1 hunks, 1 lines changed
- examine changes to '2.txt'? [Ynesfdaq?] y
+ examine changes to '2.txt'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,5 +1,5 @@
a
@@ -287,11 +291,13 @@
c
d
e
- record change 3/4 to '2.txt'? [Ynesfdaq?] y
+ record change 3/4 to '2.txt'?
+ (enter ? for help) [Ynesfdaq?] y
diff --git a/dir/a.txt b/dir/a.txt
1 hunks, 1 lines changed
- examine changes to 'dir/a.txt'? [Ynesfdaq?] n
+ examine changes to 'dir/a.txt'?
+ (enter ? for help) [Ynesfdaq?] n
After qrecord a.patch 'tip'"
@@ -361,7 +367,8 @@
> EOF
diff --git a/1.txt b/1.txt
1 hunks, 1 lines changed
- examine changes to '1.txt'? [Ynesfdaq?] y
+ examine changes to '1.txt'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,5 +1,5 @@
1
@@ -370,11 +377,13 @@
-4
+4 4
5
- record change 1/2 to '1.txt'? [Ynesfdaq?] y
+ record change 1/2 to '1.txt'?
+ (enter ? for help) [Ynesfdaq?] y
diff --git a/dir/a.txt b/dir/a.txt
1 hunks, 1 lines changed
- examine changes to 'dir/a.txt'? [Ynesfdaq?] y
+ examine changes to 'dir/a.txt'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,4 +1,4 @@
-hello world
@@ -382,7 +391,8 @@
someone
up
- record change 2/2 to 'dir/a.txt'? [Ynesfdaq?] y
+ record change 2/2 to 'dir/a.txt'?
+ (enter ? for help) [Ynesfdaq?] y
After qrecord b.patch 'tip'
--- a/tests/test-rebase-abort.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-rebase-abort.t Mon Jul 22 14:00:33 2019 -0400
@@ -1,3 +1,6 @@
+#testcases abortcommand abortflag
+#testcases continuecommand continueflag
+
$ cat >> $HGRCPATH <<EOF
> [extensions]
> rebase=
@@ -9,6 +12,19 @@
> tglog = log -G --template "{rev}:{phase} '{desc}' {branches}\n"
> EOF
+#if abortflag
+ $ cat >> $HGRCPATH <<EOF
+ > [alias]
+ > abort = rebase --abort
+ > EOF
+#endif
+
+#if continueflag
+ $ cat >> $HGRCPATH <<EOF
+ > [alias]
+ > continue = rebase --continue
+ > EOF
+#endif
$ hg init a
$ cd a
@@ -114,7 +130,13 @@
Abort (should clear out unsupported merge state):
- $ hg rebase --abort
+#if abortcommand
+when in dry-run mode
+ $ hg abort --dry-run
+ rebase in progress, will be aborted
+#endif
+
+ $ hg abort
saved backup bundle to $TESTTMP/a/.hg/strip-backup/3e046f2ecedb-6beef7d5-backup.hg
rebase aborted
$ hg debugmergestate
@@ -148,13 +170,13 @@
$ hg --config extensions.mq= strip --quiet "destination()"
$ mv .hg/rebasestate.back .hg/rebasestate
- $ hg rebase --continue
+ $ hg continue
abort: cannot continue inconsistent rebase
(use "hg rebase --abort" to clear broken state)
[255]
$ hg summary | grep '^rebase: '
rebase: (use "hg rebase --abort" to clear broken state)
- $ hg rebase --abort
+ $ hg abort
rebase aborted (no revision is removed, only broken state is cleared)
$ cd ..
@@ -271,7 +293,7 @@
warning: conflicts while merging c! (edit, then use 'hg resolve --mark')
unresolved conflicts (see hg resolve, then hg rebase --continue)
[1]
- $ hg rebase --abort
+ $ hg abort
rebase aborted
$ hg log -G --template "{rev} {desc} {bookmarks}"
@ 3 C foo
@@ -324,7 +346,7 @@
$ cat a
new
- $ hg rebase --abort
+ $ hg abort
rebase aborted
$ cat a
new
@@ -405,7 +427,7 @@
(use 'hg rebase --continue' or 'hg rebase --abort')
[255]
- $ hg rebase --abort
+ $ hg abort
saved backup bundle to $TESTTMP/interrupted/.hg/strip-backup/3d8812cf300d-93041a90-backup.hg
rebase aborted
$ hg log -G --template "{rev} {desc} {bookmarks}"
@@ -456,7 +478,7 @@
rebasing 2:e4ea5cdc9789 "conflicting 1"
unresolved conflicts (see hg resolve, then hg rebase --continue)
[1]
- $ hg rebase --abort
+ $ hg abort
rebase aborted
$ hg summary
parent: 3:b16646383533 tip
@@ -497,7 +519,7 @@
warning: conflicts while merging root! (edit, then use 'hg resolve --mark')
unresolved conflicts (see hg resolve, then hg rebase --continue)
[1]
- $ hg rebase --abort
+ $ hg abort
rebase aborted
$ cd ..
--- a/tests/test-rebase-conflicts.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-rebase-conflicts.t Mon Jul 22 14:00:33 2019 -0400
@@ -82,6 +82,7 @@
# To continue: hg rebase --continue
# To abort: hg rebase --abort
+ # To stop: hg rebase --stop
Try to continue without solving the conflict:
@@ -248,9 +249,6 @@
getting f1.txt
merge against 9:e31216eec445
detach base 8:8e4e2c1a07ae
- searching for copies back to rev 3
- unmatched files in other (from topological common ancestor):
- f2.txt
resolving manifests
branchmerge: True, force: True, partial: False
ancestor: 8e4e2c1a07ae, local: 4bc80088dc6b+, remote: e31216eec445
@@ -268,9 +266,6 @@
already in destination
merge against 10:2f2496ddf49d
detach base 9:e31216eec445
- searching for copies back to rev 3
- unmatched files in other (from topological common ancestor):
- f2.txt
resolving manifests
branchmerge: True, force: True, partial: False
ancestor: e31216eec445, local: 19c888675e13+, remote: 2f2496ddf49d
@@ -329,8 +324,8 @@
bundle2-input-part: total payload size 24
bundle2-input-bundle: 2 parts total
updating the branch cache
- invalid branchheads cache (served): tip differs
- invalid branchheads cache (served.hidden): tip differs
+ invalid branch cache (served): tip differs
+ invalid branch cache (served.hidden): tip differs
rebase completed
Test minimization of merge conflicts
--- a/tests/test-rebase-inmemory.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-rebase-inmemory.t Mon Jul 22 14:00:33 2019 -0400
@@ -744,7 +744,7 @@
$ hg tglog
@ 6: 676538af172d 'untracked rename of d to e'
|
- | * 5: 71cb43376053 'merge'
+ | * 5: 574d92ad16fc 'merge'
| |\
| | x 4: 2c8b5dad7956 'rename d to e'
| | |
@@ -758,9 +758,67 @@
$ hg rebase -b 5 -d tip
rebasing 3:ca58782ad1e4 "b"
- rebasing 5:71cb43376053 "merge"
- note: not rebasing 5:71cb43376053 "merge", its destination already has all its changes
+ rebasing 5:574d92ad16fc "merge"
+ note: not rebasing 5:574d92ad16fc "merge", its destination already has all its changes
+
+ $ cd ..
+
+Test rebasing a commit with copy information
+
+ $ hg init rebase-rename
+ $ cd rebase-rename
+ $ echo a > a
+ $ hg ci -Aqm 'add a'
+ $ echo a2 > a
+ $ hg ci -m 'modify a'
+ $ hg co -q 0
+ $ hg mv a b
+ $ hg ci -qm 'rename a to b'
+ $ hg rebase -d 1
+ rebasing 2:b977edf6f839 "rename a to b" (tip)
+ merging a and b to b
+ saved backup bundle to $TESTTMP/rebase-rename/.hg/strip-backup/b977edf6f839-0864f570-rebase.hg
+ $ hg st --copies --change .
+ A b
+ a
+ R a
+ $ cd ..
+
+Test rebasing a commit with copy information, where the target is empty
+ $ hg init rebase-rename-empty
+ $ cd rebase-rename-empty
+ $ echo a > a
+ $ hg ci -Aqm 'add a'
+ $ cat > a
+ $ hg ci -m 'make a empty'
+ $ hg co -q 0
+ $ hg mv a b
+ $ hg ci -qm 'rename a to b'
+ $ hg rebase -d 1
+ rebasing 2:b977edf6f839 "rename a to b" (tip)
+ merging a and b to b
+ saved backup bundle to $TESTTMP/rebase-rename-empty/.hg/strip-backup/b977edf6f839-0864f570-rebase.hg
+ $ hg st --copies --change .
+ A b
+ a
+ R a
+ $ cd ..
+Rebase across a copy with --collapse
+
+ $ hg init rebase-rename-collapse
+ $ cd rebase-rename-collapse
+ $ echo a > a
+ $ hg ci -Aqm 'add a'
+ $ hg mv a b
+ $ hg ci -m 'rename a to b'
+ $ hg co -q 0
+ $ echo a2 > a
+ $ hg ci -qm 'modify a'
+ $ hg rebase -r . -d 1 --collapse
+ rebasing 2:41c4ea50d4cf "modify a" (tip)
+ merging b and a to b
+ saved backup bundle to $TESTTMP/rebase-rename-collapse/.hg/strip-backup/41c4ea50d4cf-b90b7994-rebase.hg
$ cd ..
Test rebasing when the file we are merging in destination is empty
--- a/tests/test-rebase-legacy.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-rebase-legacy.t Mon Jul 22 14:00:33 2019 -0400
@@ -1,3 +1,4 @@
+#testcases continuecommand continueflag
Test rebase --continue with rebasestate written by legacy client
$ cat >> $HGRCPATH <<EOF
@@ -6,6 +7,13 @@
> drawdag=$TESTDIR/drawdag.py
> EOF
+#if continueflag
+ $ cat >> $HGRCPATH <<EOF
+ > [alias]
+ > continue = rebase --continue
+ > EOF
+#endif
+
$ hg init
$ hg debugdrawdag <<'EOF'
> D H
@@ -40,7 +48,12 @@
> 6582e6951a9c48c236f746f186378e36f59f4928:0000000000000000000000000000000000000000
> EOF
- $ hg rebase --continue
+#if continuecommand
+ $ hg continue --dry-run
+ rebase in progress, will be resumed
+#endif
+
+ $ hg continue
rebasing 4:c1e6b162678d "B" (B)
rebasing 8:6f7a236de685 "D" (D)
rebasing 2:de008c61a447 "E" (E)
--- a/tests/test-rebase-mq-skip.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-rebase-mq-skip.t Mon Jul 22 14:00:33 2019 -0400
@@ -1,3 +1,4 @@
+#testcases continuecommand continueflag
This emulates the effects of an hg pull --rebase in which the remote repo
already has one local mq patch
@@ -13,6 +14,12 @@
> tglog = log -G --template "{rev}: {node|short} '{desc}' tags: {tags}\n"
> EOF
+#if continueflag
+ $ cat >> $HGRCPATH <<EOF
+ > [alias]
+ > continue = rebase --continue
+ > EOF
+#endif
$ hg init a
$ cd a
@@ -155,7 +162,7 @@
(no more unresolved files)
continue: hg rebase --continue
- $ hg rebase --continue
+ $ hg continue
already rebased 1:b4bffa6e4776 "r1" (qbase r1) as 057f55ff8f44
already rebased 2:c0fd129beb01 "r2" (r2) as 1660ab13ce9a
already rebased 3:6ff5b8feed8e "r3" (r3) as 1660ab13ce9a
--- a/tests/test-rebase-newancestor.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-rebase-newancestor.t Mon Jul 22 14:00:33 2019 -0400
@@ -108,7 +108,7 @@
$ hg tglog
@ 7: e08089805d82 'default: f-other stuff'
|
- | o 6: 9455ee510502 'dev: merge default' dev
+ | o 6: 010ced67e558 'dev: merge default' dev
|/|
o | 5: 462860db70a1 'default: remove f-default'
| |
@@ -134,12 +134,12 @@
rebasing 2:ec2c14fb2984 "dev: f-dev stuff"
rebasing 4:4b019212aaf6 "dev: merge default"
file 'f-default' was deleted in local [dest] but was modified in other [source].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? c
- rebasing 6:9455ee510502 "dev: merge default"
- saved backup bundle to $TESTTMP/ancestor-merge/.hg/strip-backup/1d1a643d390e-43e9e04b-rebase.hg
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? c
+ rebasing 6:010ced67e558 "dev: merge default"
+ saved backup bundle to $TESTTMP/ancestor-merge/.hg/strip-backup/1d1a643d390e-4a6f6d17-rebase.hg
$ hg tglog
- o 6: fbc098e72227 'dev: merge default'
+ o 6: de147e4f69cf 'dev: merge default'
|
o 5: eda7b7f46f5d 'dev: merge default'
|
@@ -164,12 +164,12 @@
rebasing 2:ec2c14fb2984 "dev: f-dev stuff"
rebasing 4:4b019212aaf6 "dev: merge default"
file 'f-default' was deleted in local [dest] but was modified in other [source].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? c
- rebasing 6:9455ee510502 "dev: merge default"
- saved backup bundle to $TESTTMP/ancestor-merge-2/.hg/strip-backup/ec2c14fb2984-62d0b222-rebase.hg
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? c
+ rebasing 6:010ced67e558 "dev: merge default"
+ saved backup bundle to $TESTTMP/ancestor-merge-2/.hg/strip-backup/ec2c14fb2984-827d7a44-rebase.hg
$ hg tglog
- o 7: fbc098e72227 'dev: merge default'
+ o 7: de147e4f69cf 'dev: merge default'
|
o 6: eda7b7f46f5d 'dev: merge default'
|
--- a/tests/test-rebase-transaction.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-rebase-transaction.t Mon Jul 22 14:00:33 2019 -0400
@@ -1,3 +1,4 @@
+#testcases continuecommand continueflag
Rebasing using a single transaction
$ cat >> $HGRCPATH <<EOF
@@ -15,6 +16,13 @@
> tglog = log -G --template "{rev}: {desc}"
> EOF
+#if continueflag
+ $ cat >> $HGRCPATH <<EOF
+ > [alias]
+ > continue = rebase --continue
+ > EOF
+#endif
+
Check that a simple rebase works
$ hg init simple && cd simple
@@ -123,7 +131,7 @@
$ hg resolve -m
(no more unresolved files)
continue: hg rebase --continue
- $ hg rebase --continue
+ $ hg continue
already rebased 1:112478962961 "B" (B) as 79bc8f4973ce
rebasing 3:c26739dbe603 "C" (C)
rebasing 5:d24bb333861c "D" (D tip)
@@ -177,7 +185,7 @@
|/
o 0: A
- $ hg rebase --continue
+ $ hg continue
rebasing 1:112478962961 "B" (B)
rebasing 3:26805aba1e60 "C" (C)
rebasing 5:f585351a92f8 "D" (D tip)
--- a/tests/test-remotefilelog-gcrepack.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-remotefilelog-gcrepack.t Mon Jul 22 14:00:33 2019 -0400
@@ -43,7 +43,7 @@
$ sleep 0.5
$ hg debugwaitonrepack >/dev/null 2>%1
- $ find $CACHEDIR | sort | grep ".datapack\|.histpack"
+ $ find $CACHEDIR | sort | egrep ".datapack|.histpack"
$TESTTMP/hgcache/master/packs/7bcd2d90b99395ca43172a0dd24e18860b2902f9.histpack
$TESTTMP/hgcache/master/packs/dc8f8fdc76690ce27791ce9f53a18da379e50d37.datapack
@@ -76,7 +76,7 @@
$ sleep 0.5
$ hg debugwaitonrepack >/dev/null 2>%1
- $ find $CACHEDIR | sort | grep ".datapack\|.histpack"
+ $ find $CACHEDIR | sort | egrep ".datapack|.histpack"
$TESTTMP/hgcache/master/packs/7bcd2d90b99395ca43172a0dd24e18860b2902f9.histpack
$TESTTMP/hgcache/master/packs/a4e1d094ec2aee8a08a4d6d95a13c634cc7d7394.datapack
@@ -104,7 +104,7 @@
$ sleep 0.5
$ hg debugwaitonrepack >/dev/null 2>%1
- $ find $CACHEDIR | sort | grep ".datapack\|.histpack"
+ $ find $CACHEDIR | sort | egrep ".datapack|.histpack"
$TESTTMP/hgcache/master/packs/7bcd2d90b99395ca43172a0dd24e18860b2902f9.histpack
$TESTTMP/hgcache/master/packs/dc8f8fdc76690ce27791ce9f53a18da379e50d37.datapack
@@ -138,7 +138,7 @@
$ sleep 0.5
$ hg debugwaitonrepack >/dev/null 2>%1
- $ find $CACHEDIR | sort | grep ".datapack\|.histpack"
+ $ find $CACHEDIR | sort | egrep ".datapack|.histpack"
$TESTTMP/hgcache/master/packs/7bcd2d90b99395ca43172a0dd24e18860b2902f9.histpack
$TESTTMP/hgcache/master/packs/dc8f8fdc76690ce27791ce9f53a18da379e50d37.datapack
--- a/tests/test-remotefilelog-sparse.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-remotefilelog-sparse.t Mon Jul 22 14:00:33 2019 -0400
@@ -98,12 +98,5 @@
$ clearcache
$ hg prefetch -r '. + .^' -I x -I z
4 files fetched over 1 fetches - (4 misses, 0.00% hit ratio) over * (glob)
-Originally this was testing that the rebase doesn't fetch pointless
-blobs. Right now it fails because core's sparse can't load a spec from
-the working directory. Presumably there's a fix, but I'm not sure what it is.
$ hg rebase -d 2 --keep
rebasing 1:876b1317060d "x2" (foo)
- transaction abort!
- rollback completed
- abort: cannot parse sparse patterns from working directory
- [255]
--- a/tests/test-rename-dir-merge.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-rename-dir-merge.t Mon Jul 22 14:00:33 2019 -0400
@@ -24,7 +24,6 @@
created new head
$ hg merge --debug 1
- searching for copies back to rev 1
unmatched files in local:
a/c
unmatched files in other:
@@ -70,7 +69,6 @@
$ hg co -C 1
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg merge --debug 2
- searching for copies back to rev 1
unmatched files in local:
b/a
b/b
--- a/tests/test-rename-merge1.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-rename-merge1.t Mon Jul 22 14:00:33 2019 -0400
@@ -22,7 +22,6 @@
created new head
$ hg merge -y --debug
- searching for copies back to rev 1
unmatched files in local:
c2
unmatched files in other:
@@ -76,7 +75,7 @@
$ hg cp b b3
$ hg cp b b4
$ hg ci -A -m 'copy b twice'
- $ hg up eb92d88a9712
+ $ hg up '.^'
0 files updated, 0 files merged, 2 files removed, 0 files unresolved
$ hg up
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -88,7 +87,7 @@
$ hg cp b b3
$ hg mv b b4
$ hg ci -A -m 'divergent renames in same changeset'
- $ hg up c761c6948de0
+ $ hg up '.^'
1 files updated, 0 files merged, 2 files removed, 0 files unresolved
$ hg up
2 files updated, 0 files merged, 1 files removed, 0 files unresolved
@@ -168,7 +167,6 @@
$ hg commit -m "deleted file"
created new head
$ hg merge --debug
- searching for copies back to rev 1
unmatched files in other:
newfile
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
--- a/tests/test-rename-merge2.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-rename-merge2.t Mon Jul 22 14:00:33 2019 -0400
@@ -68,7 +68,6 @@
> hg add $2 2> /dev/null
> fi
> }
- $ uc() { up $1; hg cp $1 $2; } # update + copy
$ um() { up $1; hg mv $1 $2; }
$ nc() { hg cp $1 $2; } # just copy
$ nm() { hg mv $1 $2; } # just move
@@ -77,7 +76,6 @@
--------------
test L:up a R:nc a b W: - 1 get local a to b
--------------
- searching for copies back to rev 1
unmatched files in other:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -116,7 +114,6 @@
--------------
test L:nc a b R:up a W: - 2 get rem change to a and b
--------------
- searching for copies back to rev 1
unmatched files in local:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -156,7 +153,6 @@
--------------
test L:up a R:nm a b W: - 3 get local a change to b, remove a
--------------
- searching for copies back to rev 1
unmatched files in other:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -195,7 +191,6 @@
--------------
test L:nm a b R:up a W: - 4 get remote change to b
--------------
- searching for copies back to rev 1
unmatched files in local:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -233,7 +228,6 @@
--------------
test L: R:nc a b W: - 5 get b
--------------
- searching for copies back to rev 1
unmatched files in other:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -266,7 +260,6 @@
--------------
test L:nc a b R: W: - 6 nothing
--------------
- searching for copies back to rev 1
unmatched files in local:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -298,7 +291,6 @@
--------------
test L: R:nm a b W: - 7 get b
--------------
- searching for copies back to rev 1
unmatched files in other:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -332,7 +324,6 @@
--------------
test L:nm a b R: W: - 8 nothing
--------------
- searching for copies back to rev 1
unmatched files in local:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -363,9 +354,9 @@
--------------
test L:um a b R:um a b W: - 9 do merge with ancestor in a
--------------
- searching for copies back to rev 1
- unmatched files new in both:
- b
+ all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+ src: 'a' -> dst: 'b' *
+ checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: 924404dff337, local: 62e7bf090eba+, remote: 49b6d8032493
@@ -404,7 +395,6 @@
--------------
test L:nm a b R:nm a c W: - 11 get c, keep b
--------------
- searching for copies back to rev 1
unmatched files in local:
b
unmatched files in other:
@@ -443,9 +433,9 @@
--------------
test L:nc a b R:up b W: - 12 merge b no ancestor
--------------
- searching for copies back to rev 1
- unmatched files new in both:
- b
+ all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+ src: 'a' -> dst: 'b'
+ checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: 924404dff337, local: 86a2aa42fc76+, remote: af30c7647fc7
@@ -482,9 +472,9 @@
--------------
test L:up b R:nm a b W: - 13 merge b no ancestor
--------------
- searching for copies back to rev 1
- unmatched files new in both:
- b
+ all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+ src: 'a' -> dst: 'b'
+ checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: 924404dff337, local: 59318016310c+, remote: bdb19105162a
@@ -522,9 +512,9 @@
--------------
test L:nc a b R:up a b W: - 14 merge b no ancestor
--------------
- searching for copies back to rev 1
- unmatched files new in both:
- b
+ all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+ src: 'a' -> dst: 'b'
+ checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: 924404dff337, local: 86a2aa42fc76+, remote: 8dbce441892a
@@ -562,9 +552,9 @@
--------------
test L:up b R:nm a b W: - 15 merge b no ancestor, remove a
--------------
- searching for copies back to rev 1
- unmatched files new in both:
- b
+ all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+ src: 'a' -> dst: 'b'
+ checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: 924404dff337, local: 59318016310c+, remote: bdb19105162a
@@ -602,9 +592,9 @@
--------------
test L:nc a b R:up a b W: - 16 get a, merge b no ancestor
--------------
- searching for copies back to rev 1
- unmatched files new in both:
- b
+ all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+ src: 'a' -> dst: 'b'
+ checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: 924404dff337, local: 86a2aa42fc76+, remote: 8dbce441892a
@@ -642,9 +632,9 @@
--------------
test L:up a b R:nc a b W: - 17 keep a, merge b no ancestor
--------------
- searching for copies back to rev 1
- unmatched files new in both:
- b
+ all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+ src: 'a' -> dst: 'b'
+ checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: 924404dff337, local: 0b76e65c8289+, remote: 4ce40f5aca24
@@ -681,9 +671,9 @@
--------------
test L:nm a b R:up a b W: - 18 merge b no ancestor
--------------
- searching for copies back to rev 1
- unmatched files new in both:
- b
+ all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+ src: 'a' -> dst: 'b'
+ checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: 924404dff337, local: 02963e448370+, remote: 8dbce441892a
@@ -693,8 +683,8 @@
a: prompt deleted/changed -> m (premerge)
picked tool ':prompt' for a (binary False symlink False changedelete True)
file 'a' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
b: both created -> m (premerge)
picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b
@@ -726,9 +716,9 @@
--------------
test L:up a b R:nm a b W: - 19 merge b no ancestor, prompt remove a
--------------
- searching for copies back to rev 1
- unmatched files new in both:
- b
+ all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+ src: 'a' -> dst: 'b'
+ checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: 924404dff337, local: 0b76e65c8289+, remote: bdb19105162a
@@ -739,8 +729,8 @@
a: prompt changed/deleted -> m (premerge)
picked tool ':prompt' for a (binary False symlink False changedelete True)
file 'a' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? u
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
b: both created -> m (premerge)
picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b
@@ -772,7 +762,6 @@
--------------
test L:up a R:um a b W: - 20 merge a and b to b, remove a
--------------
- searching for copies back to rev 1
unmatched files in other:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -815,7 +804,6 @@
--------------
test L:um a b R:up a W: - 21 merge a and b to b
--------------
- searching for copies back to rev 1
unmatched files in local:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
@@ -860,7 +848,6 @@
--------------
test L:nm a b R:up a c W: - 23 get c, keep b
--------------
- searching for copies back to rev 1
unmatched files in local:
b
unmatched files in other:
@@ -941,7 +928,6 @@
$ echo m > 7/f
$ echo m > 8/f
$ hg merge -f --tool internal:dump -v --debug -r2 | sed '/^resolving manifests/,$d' 2> /dev/null
- searching for copies back to rev 1
unmatched files in local:
5/g
6/g
@@ -949,10 +935,8 @@
3/g
4/g
7/f
- unmatched files new in both:
- 0/f
- 1/g
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+ src: '1/f' -> dst: '1/g' *
src: '3/f' -> dst: '3/g' *
src: '4/f' -> dst: '4/g' *
src: '5/f' -> dst: '5/g' *
--- a/tests/test-resolve.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-resolve.t Mon Jul 22 14:00:33 2019 -0400
@@ -649,8 +649,8 @@
$ hg merge -r 1
file 'file1' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
@@ -678,8 +678,8 @@
$ hg resolve --unmark file1
$ echo 'd' | hg resolve file1 --config ui.interactive=1
file 'file1' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? d
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? d
(no more unresolved files)
$ hg resolve --list
R file1
@@ -694,8 +694,8 @@
$ hg resolve --unmark file1
$ hg resolve file1
file 'file1' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
[1]
$ [ -f file1 ] || echo "File does not exist?"
$ hg resolve --list
@@ -708,8 +708,8 @@
$ hg resolve --unmark file1
$ hg resolve file1
file 'file1' was deleted in local [working copy] but was modified in other [merge rev].
- What do you want to do?
- use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+ What do you want to do? u
[1]
$ [ -f file1 ] || echo "File does not exist?"
$ hg resolve --list
@@ -726,8 +726,8 @@
$ hg update -qCr 1
$ hg merge -r 2
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? u
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
@@ -755,8 +755,8 @@
$ hg resolve --unmark file1
$ echo 'd' | hg resolve file1 --config ui.interactive=1
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? d
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? d
(no more unresolved files)
$ hg resolve --list
R file1
@@ -771,8 +771,8 @@
$ hg resolve --unmark file1
$ hg resolve file1
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? u
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
[1]
$ [ -f file1 ] || echo "File does not exist?"
$ hg resolve --list
@@ -785,8 +785,8 @@
$ hg resolve --unmark file1
$ hg resolve file1
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? u
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
[1]
$ [ -f file1 ] || echo "File does not exist?"
$ hg resolve --list
--- a/tests/test-revert-interactive.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-revert-interactive.t Mon Jul 22 14:00:33 2019 -0400
@@ -55,7 +55,8 @@
removing folder1/i
diff --git a/f b/f
2 hunks, 2 lines changed
- examine changes to 'f'? [Ynesfdaq?] y
+ examine changes to 'f'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,6 +1,5 @@
-a
@@ -64,7 +65,8 @@
3
4
5
- apply change 1/6 to 'f'? [Ynesfdaq?] y
+ apply change 1/6 to 'f'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -2,6 +1,5 @@
1
@@ -73,11 +75,13 @@
4
5
-b
- apply change 2/6 to 'f'? [Ynesfdaq?] y
+ apply change 2/6 to 'f'?
+ (enter ? for help) [Ynesfdaq?] y
diff --git a/folder1/g b/folder1/g
2 hunks, 2 lines changed
- examine changes to 'folder1/g'? [Ynesfdaq?] y
+ examine changes to 'folder1/g'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,6 +1,5 @@
-c
@@ -86,7 +90,8 @@
3
4
5
- apply change 3/6 to 'folder1/g'? [Ynesfdaq?] ?
+ apply change 3/6 to 'folder1/g'?
+ (enter ? for help) [Ynesfdaq?] ?
y - yes, apply this change
n - no, skip this change
@@ -97,7 +102,8 @@
a - apply all changes to all remaining files
q - quit, applying no changes
? - ? (display help)
- apply change 3/6 to 'folder1/g'? [Ynesfdaq?] y
+ apply change 3/6 to 'folder1/g'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -2,6 +1,5 @@
1
@@ -106,11 +112,13 @@
4
5
-d
- apply change 4/6 to 'folder1/g'? [Ynesfdaq?] n
+ apply change 4/6 to 'folder1/g'?
+ (enter ? for help) [Ynesfdaq?] n
diff --git a/folder2/h b/folder2/h
2 hunks, 2 lines changed
- examine changes to 'folder2/h'? [Ynesfdaq?] n
+ examine changes to 'folder2/h'?
+ (enter ? for help) [Ynesfdaq?] n
reverting f
reverting folder1/g
@@ -141,7 +149,8 @@
$ echo q | hg revert -i -r 2
diff --git a/folder1/g b/folder1/g
1 hunks, 1 lines changed
- examine changes to 'folder1/g'? [Ynesfdaq?] q
+ examine changes to 'folder1/g'?
+ (enter ? for help) [Ynesfdaq?] q
abort: user quit
[255]
@@ -157,7 +166,8 @@
4
5
-d
- apply this change to 'folder1/g'? [Ynesfdaq?] n
+ apply this change to 'folder1/g'?
+ (enter ? for help) [Ynesfdaq?] n
$ ls folder1/
g
@@ -171,7 +181,8 @@
4
5
-d
- apply this change to 'folder1/g'? [Ynesfdaq?] y
+ apply this change to 'folder1/g'?
+ (enter ? for help) [Ynesfdaq?] y
$ ls folder1/
g
@@ -193,7 +204,8 @@
remove added file folder1/i (Yn)? n
diff --git a/f b/f
2 hunks, 2 lines changed
- examine changes to 'f'? [Ynesfdaq?] y
+ examine changes to 'f'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,6 +1,5 @@
-a
@@ -202,7 +214,8 @@
3
4
5
- apply change 1/6 to 'f'? [Ynesfdaq?] y
+ apply change 1/6 to 'f'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -2,6 +1,5 @@
1
@@ -211,11 +224,13 @@
4
5
-b
- apply change 2/6 to 'f'? [Ynesfdaq?] y
+ apply change 2/6 to 'f'?
+ (enter ? for help) [Ynesfdaq?] y
diff --git a/folder1/g b/folder1/g
2 hunks, 2 lines changed
- examine changes to 'folder1/g'? [Ynesfdaq?] y
+ examine changes to 'folder1/g'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,6 +1,5 @@
-c
@@ -224,7 +239,8 @@
3
4
5
- apply change 3/6 to 'folder1/g'? [Ynesfdaq?] y
+ apply change 3/6 to 'folder1/g'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -2,6 +1,5 @@
1
@@ -233,11 +249,13 @@
4
5
-d
- apply change 4/6 to 'folder1/g'? [Ynesfdaq?] n
+ apply change 4/6 to 'folder1/g'?
+ (enter ? for help) [Ynesfdaq?] n
diff --git a/folder2/h b/folder2/h
2 hunks, 2 lines changed
- examine changes to 'folder2/h'? [Ynesfdaq?] n
+ examine changes to 'folder2/h'?
+ (enter ? for help) [Ynesfdaq?] n
reverting f
reverting folder1/g
@@ -280,7 +298,8 @@
3
4
5
- discard change 1/2 to 'f'? [Ynesfdaq?] ?
+ discard change 1/2 to 'f'?
+ (enter ? for help) [Ynesfdaq?] ?
y - yes, discard this change
n - no, skip this change
@@ -291,7 +310,8 @@
a - discard all changes to all remaining files
q - quit, discarding no changes
? - ? (display help)
- discard change 1/2 to 'f'? [Ynesfdaq?] y
+ discard change 1/2 to 'f'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -2,6 +1,5 @@
1
@@ -300,7 +320,8 @@
4
5
-b
- discard change 2/2 to 'f'? [Ynesfdaq?] n
+ discard change 2/2 to 'f'?
+ (enter ? for help) [Ynesfdaq?] n
$ hg st
M f
@@ -329,14 +350,16 @@
> EOF
diff --git a/f b/f
1 hunks, 1 lines changed
- examine changes to 'f'? [Ynesfdaq?] y
+ examine changes to 'f'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -4,4 +4,3 @@
3
4
5
-b
- discard this change to 'f'? [Ynesfdaq?] n
+ discard this change to 'f'?
+ (enter ? for help) [Ynesfdaq?] n
$ hg update -C .
@@ -363,13 +386,15 @@
> EOF
diff --git a/k b/k
1 hunks, 2 lines changed
- examine changes to 'k'? [Ynesfdaq?] y
+ examine changes to 'k'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,1 +1,2 @@
-1
+0
+2
- discard this change to 'k'? [Ynesfdaq?] e
+ discard this change to 'k'?
+ (enter ? for help) [Ynesfdaq?] e
reverting k
$ cat k
@@ -414,13 +439,15 @@
> EOF
diff --git a/a b/a
1 hunks, 1 lines changed
- examine changes to 'a'? [Ynesfdaq?] y
+ examine changes to 'a'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,2 +1,1 @@
0
-1
\ No newline at end of file
- apply this change to 'a'? [Ynesfdaq?] y
+ apply this change to 'a'?
+ (enter ? for help) [Ynesfdaq?] y
reverting a
$ cat a
@@ -488,17 +515,20 @@
> EOF
diff --git a/a b/a
2 hunks, 2 lines changed
- examine changes to 'a'? [Ynesfdaq?] y
+ examine changes to 'a'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,1 +1,2 @@
+x
a
- keep change 1/2 to 'a'? [Ynesfdaq?] n
+ keep change 1/2 to 'a'?
+ (enter ? for help) [Ynesfdaq?] n
@@ -1,1 +2,2 @@
a
+y
- keep change 2/2 to 'a'? [Ynesfdaq?] e
+ keep change 2/2 to 'a'?
+ (enter ? for help) [Ynesfdaq?] e
reverting a
$ cat a
--- a/tests/test-revset.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-revset.t Mon Jul 22 14:00:33 2019 -0400
@@ -1721,11 +1721,9 @@
4
$ log 'modifies("*")'
4
- 6
$ log 'modifies("set:modified()")'
4
$ log 'id(5)'
- 2
$ log 'only(9)'
8
9
@@ -1834,12 +1832,12 @@
Test hexadecimal revision
$ log 'id(2)'
- $ log 'id(5)'
- 2
- $ hg --config experimental.revisions.prefixhexnode=yes log --template '{rev}\n' -r 'id(x5)'
- 2
- $ hg --config experimental.revisions.prefixhexnode=yes log --template '{rev}\n' -r 'x5'
- 2
+ $ log 'id(8)'
+ 3
+ $ hg --config experimental.revisions.prefixhexnode=yes log --template '{rev}\n' -r 'id(x8)'
+ 3
+ $ hg --config experimental.revisions.prefixhexnode=yes log --template '{rev}\n' -r 'x8'
+ 3
$ hg --config experimental.revisions.prefixhexnode=yes log --template '{rev}\n' -r 'id(x)'
$ hg --config experimental.revisions.prefixhexnode=yes log --template '{rev}\n' -r 'x'
abort: 00changelog.i@: ambiguous identifier!
@@ -1962,25 +1960,26 @@
2147483647
Test working-directory integer revision and node id
-(BUG: '0:wdir()' is still needed to populate wdir revision)
- $ hg debugrevspec '0:wdir() & 2147483647'
+ $ hg debugrevspec '2147483647'
2147483647
- $ hg debugrevspec '0:wdir() & rev(2147483647)'
+ $ hg debugrevspec 'rev(2147483647)'
+ 2147483647
+ $ hg debugrevspec 'ffffffffffffffffffffffffffffffffffffffff'
2147483647
- $ hg debugrevspec '0:wdir() & ffffffffffffffffffffffffffffffffffffffff'
+ $ hg debugrevspec 'ffffffffffff'
2147483647
- $ hg debugrevspec '0:wdir() & ffffffffffff'
+ $ hg debugrevspec 'id(ffffffffffffffffffffffffffffffffffffffff)'
2147483647
- $ hg debugrevspec '0:wdir() & id(ffffffffffffffffffffffffffffffffffffffff)'
+ $ hg debugrevspec 'id(ffffffffffff)'
2147483647
- $ hg debugrevspec '0:wdir() & id(ffffffffffff)'
+ $ hg debugrevspec 'ffffffffffff+000000000000'
2147483647
+ -1
$ cd ..
Test short 'ff...' hash collision
-(BUG: '0:wdir()' is still needed to populate wdir revision)
$ hg init wdir-hashcollision
$ cd wdir-hashcollision
@@ -2006,21 +2005,21 @@
$ hg debugobsolete fffbae3886c8fbb2114296380d276fd37715d571
obsoleted 1 changesets
- $ hg debugrevspec '0:wdir() & fff'
+ $ hg debugrevspec 'fff'
abort: 00changelog.i@fff: ambiguous identifier!
[255]
- $ hg debugrevspec '0:wdir() & ffff'
+ $ hg debugrevspec 'ffff'
abort: 00changelog.i@ffff: ambiguous identifier!
[255]
- $ hg debugrevspec '0:wdir() & fffb'
+ $ hg debugrevspec 'fffb'
abort: 00changelog.i@fffb: ambiguous identifier!
[255]
BROKEN should be '2' (node lookup uses unfiltered repo)
- $ hg debugrevspec '0:wdir() & id(fffb)'
+ $ hg debugrevspec 'id(fffb)'
BROKEN should be '2' (node lookup uses unfiltered repo)
- $ hg debugrevspec '0:wdir() & ffff8'
+ $ hg debugrevspec 'ffff8'
4
- $ hg debugrevspec '0:wdir() & fffff'
+ $ hg debugrevspec 'fffff'
2147483647
$ cd ..
@@ -2075,6 +2074,17 @@
$ log 'parents(merge())'
4
5
+
+ $ hg merge 7
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ log '0:wdir() & merge()'
+ 6
+ 2147483647
+ $ hg update -qC .
+ $ log '0:wdir() & merge()'
+ 6
+
$ log 'p1(branchpoint())'
0
2
@@ -2084,7 +2094,6 @@
2
$ log 'removes(a)'
2
- 6
$ log 'roots(all())'
0
$ log 'reverse(2 or 3 or 4 or 5)'
@@ -2698,7 +2707,6 @@
$ log 'sort(outgoing() or reverse(removes(a)), rev)'
2
- 6
8
9
@@ -2707,7 +2715,6 @@
$ log 'sort(outgoing() or reverse(removes(a)), -rev)'
9
8
- 6
2
test empty sort key which is noop
--- a/tests/test-revset2.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-revset2.t Mon Jul 22 14:00:33 2019 -0400
@@ -100,7 +100,6 @@
$ log 'parents(outgoing() or removes(a))'
1
4
- 5
8
test that `or` operation combines elements in the right order:
@@ -805,17 +804,17 @@
(real pair)
$ hg diff -r 'tip^^' -r 'tip'
- diff -r 2326846efdab -r 24286f4ae135 .hgtags
+ diff -r 2326846efdab -r d2e607fcf9e4 .hgtags
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000
@@ -0,0 +1,1 @@
- +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
+ +d5e6808a86077d6f5c1ff626d4352d01da7d2a1f 1.0
$ hg diff -r 'tip^^::tip'
- diff -r 2326846efdab -r 24286f4ae135 .hgtags
+ diff -r 2326846efdab -r d2e607fcf9e4 .hgtags
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000
@@ -0,0 +1,1 @@
- +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
+ +d5e6808a86077d6f5c1ff626d4352d01da7d2a1f 1.0
(single rev)
@@ -829,13 +828,13 @@
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/.hgtags * (glob)
@@ -0,0 +1,1 @@
- +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
+ +d5e6808a86077d6f5c1ff626d4352d01da7d2a1f 1.0
$ hg diff -r 'tip^ or tip^'
diff -r d5d0dcbdc4d9 .hgtags
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/.hgtags * (glob)
@@ -0,0 +1,1 @@
- +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
+ +d5e6808a86077d6f5c1ff626d4352d01da7d2a1f 1.0
(no rev)
@@ -1231,59 +1230,59 @@
issue4553: check that revset aliases override existing hash prefix
- $ hg log -qr e
- 6:e0cc66ef77e8
+ $ hg log -qr d5e
+ 6:d5e6808a8607
- $ hg log -qr e --config revsetalias.e="all()"
+ $ hg log -qr d5e --config revsetalias.d5e="all()"
0:2785f51eece5
1:d75937da8da0
2:5ed5505e9f1c
3:8528aa5637f2
4:2326846efdab
5:904fa392b941
- 6:e0cc66ef77e8
- 7:013af1973af4
+ 6:d5e6808a8607
+ 7:586353d483b3
8:d5d0dcbdc4d9
- 9:24286f4ae135
+ 9:d2e607fcf9e4
- $ hg log -qr e: --config revsetalias.e="0"
+ $ hg log -qr d5e: --config revsetalias.d5e="0"
0:2785f51eece5
1:d75937da8da0
2:5ed5505e9f1c
3:8528aa5637f2
4:2326846efdab
5:904fa392b941
- 6:e0cc66ef77e8
- 7:013af1973af4
+ 6:d5e6808a8607
+ 7:586353d483b3
8:d5d0dcbdc4d9
- 9:24286f4ae135
+ 9:d2e607fcf9e4
- $ hg log -qr :e --config revsetalias.e="9"
+ $ hg log -qr :d5e --config revsetalias.d5e="9"
0:2785f51eece5
1:d75937da8da0
2:5ed5505e9f1c
3:8528aa5637f2
4:2326846efdab
5:904fa392b941
- 6:e0cc66ef77e8
- 7:013af1973af4
+ 6:d5e6808a8607
+ 7:586353d483b3
8:d5d0dcbdc4d9
- 9:24286f4ae135
+ 9:d2e607fcf9e4
- $ hg log -qr e:
- 6:e0cc66ef77e8
- 7:013af1973af4
+ $ hg log -qr d5e:
+ 6:d5e6808a8607
+ 7:586353d483b3
8:d5d0dcbdc4d9
- 9:24286f4ae135
+ 9:d2e607fcf9e4
- $ hg log -qr :e
+ $ hg log -qr :d5e
0:2785f51eece5
1:d75937da8da0
2:5ed5505e9f1c
3:8528aa5637f2
4:2326846efdab
5:904fa392b941
- 6:e0cc66ef77e8
+ 6:d5e6808a8607
issue2549 - correct optimizations
@@ -1471,7 +1470,7 @@
(check operator priority)
$ echo 'cat2n2($1, $2, $3, $4) = $1 ## $2 or $3 ## $4~2' >> .hg/hgrc
- $ log "cat2n2(2785f5, 1eece5, 24286f, 4ae135)"
+ $ log "cat2n2(2785f5, 1eece5, d2e607, fcf9e4)"
0
4
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rust-discovery.py Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,111 @@
+from __future__ import absolute_import
+import unittest
+
+try:
+ from mercurial import rustext
+ rustext.__name__ # trigger immediate actual import
+except ImportError:
+ rustext = None
+else:
+ # this would fail already without appropriate ancestor.__package__
+ from mercurial.rustext.discovery import (
+ PartialDiscovery,
+ )
+
+try:
+ from mercurial.cext import parsers as cparsers
+except ImportError:
+ cparsers = None
+
+# picked from test-parse-index2, copied rather than imported
+# so that it stays stable even if test-parse-index2 changes or disappears.
+data_non_inlined = (
+ b'\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01D\x19'
+ b'\x00\x07e\x12\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff'
+ b'\xff\xff\xff\xff\xd1\xf4\xbb\xb0\xbe\xfc\x13\xbd\x8c\xd3\x9d'
+ b'\x0f\xcd\xd9;\x8c\x07\x8cJ/\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+ b'\x00\x00\x00\x00\x00\x00\x01D\x19\x00\x00\x00\x00\x00\xdf\x00'
+ b'\x00\x01q\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\xff'
+ b'\xff\xff\xff\xc1\x12\xb9\x04\x96\xa4Z1t\x91\xdfsJ\x90\xf0\x9bh'
+ b'\x07l&\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+ b'\x00\x01D\xf8\x00\x00\x00\x00\x01\x1b\x00\x00\x01\xb8\x00\x00'
+ b'\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\x02\n'
+ b'\x0e\xc6&\xa1\x92\xae6\x0b\x02i\xfe-\xe5\xbao\x05\xd1\xe7\x00'
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01F'
+ b'\x13\x00\x00\x00\x00\x01\xec\x00\x00\x03\x06\x00\x00\x00\x01'
+ b'\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x12\xcb\xeby1'
+ b'\xb6\r\x98B\xcb\x07\xbd`\x8f\x92\xd9\xc4\x84\xbdK\x00\x00\x00'
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+ )
+
+
+@unittest.skipIf(rustext is None or cparsers is None,
+ "rustext or the C Extension parsers module "
+ "discovery relies on is not available")
+class rustdiscoverytest(unittest.TestCase):
+ """Test the correctness of binding to Rust code.
+
+ This test is merely for the binding to Rust itself: extraction of
+ Python variable, giving back the results etc.
+
+ It is not meant to test the algorithmic correctness of the provided
+ methods. Hence the very simple embedded index data is good enough.
+
+ Algorithmic correctness is asserted by the Rust unit tests.
+ """
+
+ def parseindex(self):
+ return cparsers.parse_index2(data_non_inlined, False)[0]
+
+ def testindex(self):
+ idx = self.parseindex()
+ # checking our assumptions about the index binary data:
+ self.assertEqual({i: (r[5], r[6]) for i, r in enumerate(idx)},
+ {0: (-1, -1),
+ 1: (0, -1),
+ 2: (1, -1),
+ 3: (2, -1)})
+
+ def testaddcommonsmissings(self):
+ idx = self.parseindex()
+ disco = PartialDiscovery(idx, [3])
+ self.assertFalse(disco.hasinfo())
+ self.assertFalse(disco.iscomplete())
+
+ disco.addcommons([1])
+ self.assertTrue(disco.hasinfo())
+ self.assertFalse(disco.iscomplete())
+
+ disco.addmissings([2])
+ self.assertTrue(disco.hasinfo())
+ self.assertTrue(disco.iscomplete())
+
+ self.assertEqual(disco.commonheads(), {1})
+
+ def testaddmissingsstats(self):
+ idx = self.parseindex()
+ disco = PartialDiscovery(idx, [3])
+ self.assertIsNone(disco.stats()['undecided'], None)
+
+ disco.addmissings([2])
+ self.assertEqual(disco.stats()['undecided'], 2)
+
+ def testaddinfocommonfirst(self):
+ idx = self.parseindex()
+ disco = PartialDiscovery(idx, [3])
+ disco.addinfo([(1, True), (2, False)])
+ self.assertTrue(disco.hasinfo())
+ self.assertTrue(disco.iscomplete())
+ self.assertEqual(disco.commonheads(), {1})
+
+ def testaddinfomissingfirst(self):
+ idx = self.parseindex()
+ disco = PartialDiscovery(idx, [3])
+ disco.addinfo([(2, False), (1, True)])
+ self.assertTrue(disco.hasinfo())
+ self.assertTrue(disco.iscomplete())
+ self.assertEqual(disco.commonheads(), {1})
+
+if __name__ == '__main__':
+ import silenttestrunner
+ silenttestrunner.main(__name__)
--- a/tests/test-server-view.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-server-view.t Mon Jul 22 14:00:33 2019 -0400
@@ -34,5 +34,30 @@
date: Thu Jan 01 00:00:00 1970 +0000
summary: r0
+
+Check same result using `experimental.extra-filter-revs`
+
+ $ hg -R test --config experimental.extra-filter-revs='not public()' serve -p $HGPORT1 -d --pid-file=hg2.pid -E errors.log
+ $ cat hg2.pid >> $DAEMON_PIDS
+ $ hg -R test2 incoming http://foo:xyzzy@localhost:$HGPORT1/
+ comparing with http://foo:***@localhost:$HGPORT1/
+ changeset: 0:1ea73414a91b
+ tag: tip
+ user: debugbuilddag
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: r0
+
+ $ hg -R test --config experimental.extra-filter-revs='not public()' debugupdatecache
+ $ ls -1 test/.hg/cache/
+ branch2-base%89c45d2fa07e
+ branch2-served
+ hgtagsfnodes1
+ rbc-names-v1
+ rbc-revs-v1
+ tags2
+ tags2-served%89c45d2fa07e
+
+cleanup
+
$ cat errors.log
$ killdaemons.py
--- a/tests/test-setdiscovery.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-setdiscovery.t Mon Jul 22 14:00:33 2019 -0400
@@ -926,7 +926,7 @@
common heads: 7ead0cba2838
-One with >200 heads, which used to use up all of the sample:
+One with >200 heads. We now switch to send them all in the initial roundtrip, but still do sampling for the later request.
$ hg init manyheads
$ cd manyheads
@@ -974,20 +974,19 @@
searching for changes
taking quick initial sample
searching: 2 queries
- query 2; still undecided: 1240, sample size is: 100
+ query 2; still undecided: 1080, sample size is: 100
sampling from both directions
searching: 3 queries
- query 3; still undecided: 1140, sample size is: 200
+ query 3; still undecided: 980, sample size is: 200
sampling from both directions
searching: 4 queries
- query 4; still undecided: \d+, sample size is: 200 (re)
+ query 4; still undecided: 435, sample size is: 210 (no-py3 !)
+ query 4; still undecided: 430, sample size is: 210 (py3 !)
sampling from both directions
searching: 5 queries
- query 5; still undecided: \d+, sample size is: 200 (re)
- sampling from both directions
- searching: 6 queries
- query 6; still undecided: \d+, sample size is: \d+ (re)
- 6 total queries in *.????s (glob)
+ query 5; still undecided: 185, sample size is: 185 (no-py3 !)
+ query 5; still undecided: 187, sample size is: 185 (py3 !)
+ 5 total queries in *.????s (glob)
elapsed time: * seconds (glob)
heads summary:
total common heads: 1
@@ -1116,6 +1115,6 @@
$ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
* @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
* @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
- * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 2 roundtrips in *.????s (glob)
+ * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 1 roundtrips in *.????s (glob)
* @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
$ cd ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-share-bookmarks.t Mon Jul 22 14:00:33 2019 -0400
@@ -0,0 +1,281 @@
+#testcases vfs svfs
+
+ $ echo "[extensions]" >> $HGRCPATH
+ $ echo "share = " >> $HGRCPATH
+
+#if svfs
+ $ echo "[format]" >> $HGRCPATH
+ $ echo "bookmarks-in-store = yes " >> $HGRCPATH
+#endif
+
+prepare repo1
+
+ $ hg init repo1
+ $ cd repo1
+ $ echo a > a
+ $ hg commit -A -m'init'
+ adding a
+ $ echo a >> a
+ $ hg commit -m'change in shared clone'
+ $ echo b > b
+ $ hg commit -A -m'another file'
+ adding b
+
+share it
+
+ $ cd ..
+ $ hg share repo1 repo2
+ updating working directory
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+test sharing bookmarks
+
+ $ hg share -B repo1 repo3
+ updating working directory
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cd repo1
+ $ hg bookmark bm1
+ $ hg bookmarks
+ * bm1 2:c2e0ac586386
+ $ cd ../repo2
+ $ hg book bm2
+ $ hg bookmarks
+ bm1 2:c2e0ac586386 (svfs !)
+ * bm2 2:c2e0ac586386
+ $ cd ../repo3
+ $ hg bookmarks
+ bm1 2:c2e0ac586386
+ bm2 2:c2e0ac586386 (svfs !)
+ $ hg book bm3
+ $ hg bookmarks
+ bm1 2:c2e0ac586386
+ bm2 2:c2e0ac586386 (svfs !)
+ * bm3 2:c2e0ac586386
+ $ cd ../repo1
+ $ hg bookmarks
+ * bm1 2:c2e0ac586386
+ bm2 2:c2e0ac586386 (svfs !)
+ bm3 2:c2e0ac586386
+
+check whether HG_PENDING makes pending changes only in related
+repositories visible to an external hook.
+
+In "hg share" case, another transaction can't run in other
+repositories sharing same source repository, because starting
+transaction requires locking store of source repository.
+
+Therefore, this test scenario ignores checking visibility of
+.hg/bookmarks.pending in repo2, which shares repo1 without bookmarks.
+
+ $ cat > $TESTTMP/checkbookmarks.sh <<EOF
+ > echo "@repo1"
+ > hg -R "$TESTTMP/repo1" bookmarks
+ > echo "@repo2"
+ > hg -R "$TESTTMP/repo2" bookmarks
+ > echo "@repo3"
+ > hg -R "$TESTTMP/repo3" bookmarks
+ > exit 1 # to avoid adding new bookmark for subsequent tests
+ > EOF
+
+ $ cd ../repo1
+ $ hg --config hooks.pretxnclose="sh $TESTTMP/checkbookmarks.sh" -q book bmX
+ @repo1
+ bm1 2:c2e0ac586386
+ bm2 2:c2e0ac586386 (svfs !)
+ bm3 2:c2e0ac586386
+ * bmX 2:c2e0ac586386
+ @repo2
+ bm1 2:c2e0ac586386 (svfs !)
+ * bm2 2:c2e0ac586386
+ bm3 2:c2e0ac586386 (svfs !)
+ @repo3
+ bm1 2:c2e0ac586386
+ bm2 2:c2e0ac586386 (svfs !)
+ * bm3 2:c2e0ac586386
+ bmX 2:c2e0ac586386 (vfs !)
+ transaction abort!
+ rollback completed
+ abort: pretxnclose hook exited with status 1
+ [255]
+ $ hg book bm1
+
+FYI, in contrast to above test, bmX is invisible in repo1 (= shared
+src), because (1) HG_PENDING refers only repo3 and (2)
+"bookmarks.pending" is written only into repo3.
+
+ $ cd ../repo3
+ $ hg --config hooks.pretxnclose="sh $TESTTMP/checkbookmarks.sh" -q book bmX
+ @repo1
+ * bm1 2:c2e0ac586386
+ bm2 2:c2e0ac586386 (svfs !)
+ bm3 2:c2e0ac586386
+ @repo2
+ bm1 2:c2e0ac586386 (svfs !)
+ * bm2 2:c2e0ac586386
+ bm3 2:c2e0ac586386 (svfs !)
+ @repo3
+ bm1 2:c2e0ac586386
+ bm2 2:c2e0ac586386 (svfs !)
+ bm3 2:c2e0ac586386
+ * bmX 2:c2e0ac586386
+ transaction abort!
+ rollback completed
+ abort: pretxnclose hook exited with status 1
+ [255]
+ $ hg book bm3
+
+clean up bm2 since it's uninteresting (not shared in the vfs case and
+same as bm3 in the svfs case)
+ $ cd ../repo2
+ $ hg book -d bm2
+
+ $ cd ../repo1
+
+test that commits work
+
+ $ echo 'shared bookmarks' > a
+ $ hg commit -m 'testing shared bookmarks'
+ $ hg bookmarks
+ * bm1 3:b87954705719
+ bm3 2:c2e0ac586386
+ $ cd ../repo3
+ $ hg bookmarks
+ bm1 3:b87954705719
+ * bm3 2:c2e0ac586386
+ $ echo 'more shared bookmarks' > a
+ $ hg commit -m 'testing shared bookmarks'
+ created new head
+ $ hg bookmarks
+ bm1 3:b87954705719
+ * bm3 4:62f4ded848e4
+ $ cd ../repo1
+ $ hg bookmarks
+ * bm1 3:b87954705719
+ bm3 4:62f4ded848e4
+ $ cd ..
+
+test pushing bookmarks works
+
+ $ hg clone repo3 repo4
+ updating to branch default
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cd repo4
+ $ hg boo bm4
+ $ echo foo > b
+ $ hg commit -m 'foo in b'
+ $ hg boo
+ bm1 3:b87954705719
+ bm3 4:62f4ded848e4
+ * bm4 5:92793bfc8cad
+ $ hg push -B bm4
+ pushing to $TESTTMP/repo3
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ exporting bookmark bm4
+ $ cd ../repo1
+ $ hg bookmarks
+ * bm1 3:b87954705719
+ bm3 4:62f4ded848e4
+ bm4 5:92793bfc8cad
+ $ cd ../repo3
+ $ hg bookmarks
+ bm1 3:b87954705719
+ * bm3 4:62f4ded848e4
+ bm4 5:92793bfc8cad
+ $ cd ..
+
+test behavior when sharing a shared repo
+
+ $ hg share -B repo3 missingdir/repo5
+ updating working directory
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cd missingdir/repo5
+ $ hg book
+ bm1 3:b87954705719
+ bm3 4:62f4ded848e4
+ bm4 5:92793bfc8cad
+ $ cd ../..
+
+test what happens when an active bookmark is deleted
+
+ $ cd repo1
+ $ hg boo -d bm3
+ $ hg boo
+ * bm1 3:b87954705719
+ bm4 5:92793bfc8cad
+ $ cd ../repo3
+ $ hg boo
+ bm1 3:b87954705719
+ bm4 5:92793bfc8cad
+ $ cd ..
+
+verify that bookmarks are not written on failed transaction
+
+ $ cat > failpullbookmarks.py << EOF
+ > """A small extension that makes bookmark pulls fail, for testing"""
+ > from __future__ import absolute_import
+ > from mercurial import (
+ > error,
+ > exchange,
+ > extensions,
+ > )
+ > def _pullbookmarks(orig, pullop):
+ > orig(pullop)
+ > raise error.HookAbort('forced failure by extension')
+ > def extsetup(ui):
+ > extensions.wrapfunction(exchange, '_pullbookmarks', _pullbookmarks)
+ > EOF
+ $ cd repo4
+ $ hg boo
+ bm1 3:b87954705719
+ bm3 4:62f4ded848e4
+ * bm4 5:92793bfc8cad
+ $ cd ../repo3
+ $ hg boo
+ bm1 3:b87954705719
+ bm4 5:92793bfc8cad
+ $ hg --config "extensions.failpullbookmarks=$TESTTMP/failpullbookmarks.py" pull $TESTTMP/repo4
+ pulling from $TESTTMP/repo4
+ searching for changes
+ no changes found
+ adding remote bookmark bm3
+ abort: forced failure by extension
+ [255]
+ $ hg boo
+ bm1 3:b87954705719
+ bm4 5:92793bfc8cad
+ $ hg pull $TESTTMP/repo4
+ pulling from $TESTTMP/repo4
+ searching for changes
+ no changes found
+ adding remote bookmark bm3
+ 1 local changesets published
+ $ hg boo
+ bm1 3:b87954705719
+ * bm3 4:62f4ded848e4
+ bm4 5:92793bfc8cad
+ $ cd ..
+
+verify bookmark behavior after unshare
+
+ $ cd repo3
+ $ hg unshare
+ $ hg boo
+ bm1 3:b87954705719
+ * bm3 4:62f4ded848e4
+ bm4 5:92793bfc8cad
+ $ hg boo -d bm4
+ $ hg boo bm5
+ $ hg boo
+ bm1 3:b87954705719
+ bm3 4:62f4ded848e4
+ * bm5 4:62f4ded848e4
+ $ cd ../repo1
+ $ hg boo
+ * bm1 3:b87954705719
+ bm3 4:62f4ded848e4
+ bm4 5:92793bfc8cad
+ $ cd ..
--- a/tests/test-share.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-share.t Mon Jul 22 14:00:33 2019 -0400
@@ -21,6 +21,14 @@
$ cd repo2
$ test -d .hg/store
[1]
+ $ hg root -Tjson | sed 's|\\\\|\\|g'
+ [
+ {
+ "hgpath": "$TESTTMP/repo2/.hg",
+ "reporoot": "$TESTTMP/repo2",
+ "storepath": "$TESTTMP/repo1/.hg/store"
+ }
+ ]
share shouldn't have a full cache dir, original repo should
@@ -157,118 +165,9 @@
$ cd ..
-test sharing bookmarks
-
- $ hg share -B repo1 repo3
- updating working directory
- 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ cd repo1
- $ hg bookmark bm1
- $ hg bookmarks
- * bm1 2:c2e0ac586386
- $ cd ../repo2
- $ hg book bm2
- $ hg bookmarks
- * bm2 3:0e6e70d1d5f1
- $ cd ../repo3
- $ hg bookmarks
- bm1 2:c2e0ac586386
- $ hg book bm3
- $ hg bookmarks
- bm1 2:c2e0ac586386
- * bm3 2:c2e0ac586386
- $ cd ../repo1
- $ hg bookmarks
- * bm1 2:c2e0ac586386
- bm3 2:c2e0ac586386
-
-check whether HG_PENDING makes pending changes only in relatd
-repositories visible to an external hook.
-
-In "hg share" case, another transaction can't run in other
-repositories sharing same source repository, because starting
-transaction requires locking store of source repository.
-
-Therefore, this test scenario ignores checking visibility of
-.hg/bookmakrs.pending in repo2, which shares repo1 without bookmarks.
-
- $ cat > $TESTTMP/checkbookmarks.sh <<EOF
- > echo "@repo1"
- > hg -R "$TESTTMP/repo1" bookmarks
- > echo "@repo2"
- > hg -R "$TESTTMP/repo2" bookmarks
- > echo "@repo3"
- > hg -R "$TESTTMP/repo3" bookmarks
- > exit 1 # to avoid adding new bookmark for subsequent tests
- > EOF
-
- $ cd ../repo1
- $ hg --config hooks.pretxnclose="sh $TESTTMP/checkbookmarks.sh" -q book bmX
- @repo1
- bm1 2:c2e0ac586386
- bm3 2:c2e0ac586386
- * bmX 2:c2e0ac586386
- @repo2
- * bm2 3:0e6e70d1d5f1
- @repo3
- bm1 2:c2e0ac586386
- * bm3 2:c2e0ac586386
- bmX 2:c2e0ac586386
- transaction abort!
- rollback completed
- abort: pretxnclose hook exited with status 1
- [255]
- $ hg book bm1
-
-FYI, in contrast to above test, bmX is invisible in repo1 (= shared
-src), because (1) HG_PENDING refers only repo3 and (2)
-"bookmarks.pending" is written only into repo3.
-
- $ cd ../repo3
- $ hg --config hooks.pretxnclose="sh $TESTTMP/checkbookmarks.sh" -q book bmX
- @repo1
- * bm1 2:c2e0ac586386
- bm3 2:c2e0ac586386
- @repo2
- * bm2 3:0e6e70d1d5f1
- @repo3
- bm1 2:c2e0ac586386
- bm3 2:c2e0ac586386
- * bmX 2:c2e0ac586386
- transaction abort!
- rollback completed
- abort: pretxnclose hook exited with status 1
- [255]
- $ hg book bm3
-
- $ cd ../repo1
-
-test that commits work
-
- $ echo 'shared bookmarks' > a
- $ hg commit -m 'testing shared bookmarks'
- $ hg bookmarks
- * bm1 3:b87954705719
- bm3 2:c2e0ac586386
- $ cd ../repo3
- $ hg bookmarks
- bm1 3:b87954705719
- * bm3 2:c2e0ac586386
- $ echo 'more shared bookmarks' > a
- $ hg commit -m 'testing shared bookmarks'
- created new head
- $ hg bookmarks
- bm1 3:b87954705719
- * bm3 4:62f4ded848e4
- $ cd ../repo1
- $ hg bookmarks
- * bm1 3:b87954705719
- bm3 4:62f4ded848e4
- $ cd ..
-
non largefiles repos won't enable largefiles
- $ hg share --config extensions.largefiles= repo3 sharedrepo
+ $ hg share --config extensions.largefiles= repo2 sharedrepo
The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
updating working directory
@@ -276,132 +175,6 @@
$ [ -f sharedrepo/.hg/hgrc ]
[1]
-test pushing bookmarks works
-
- $ hg clone repo3 repo4
- updating to branch default
- 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ cd repo4
- $ hg boo bm4
- $ echo foo > b
- $ hg commit -m 'foo in b'
- $ hg boo
- bm1 3:b87954705719
- bm3 4:62f4ded848e4
- * bm4 5:92793bfc8cad
- $ hg push -B bm4
- pushing to $TESTTMP/repo3
- searching for changes
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 1 changes to 1 files
- exporting bookmark bm4
- $ cd ../repo1
- $ hg bookmarks
- * bm1 3:b87954705719
- bm3 4:62f4ded848e4
- bm4 5:92793bfc8cad
- $ cd ../repo3
- $ hg bookmarks
- bm1 3:b87954705719
- * bm3 4:62f4ded848e4
- bm4 5:92793bfc8cad
- $ cd ..
-
-test behavior when sharing a shared repo
-
- $ hg share -B repo3 missingdir/repo5
- updating working directory
- 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ cd missingdir/repo5
- $ hg book
- bm1 3:b87954705719
- bm3 4:62f4ded848e4
- bm4 5:92793bfc8cad
- $ cd ../..
-
-test what happens when an active bookmark is deleted
-
- $ cd repo1
- $ hg boo -d bm3
- $ hg boo
- * bm1 3:b87954705719
- bm4 5:92793bfc8cad
- $ cd ../repo3
- $ hg boo
- bm1 3:b87954705719
- bm4 5:92793bfc8cad
- $ cd ..
-
-verify that bookmarks are not written on failed transaction
-
- $ cat > failpullbookmarks.py << EOF
- > """A small extension that makes bookmark pulls fail, for testing"""
- > from __future__ import absolute_import
- > from mercurial import (
- > error,
- > exchange,
- > extensions,
- > )
- > def _pullbookmarks(orig, pullop):
- > orig(pullop)
- > raise error.HookAbort('forced failure by extension')
- > def extsetup(ui):
- > extensions.wrapfunction(exchange, '_pullbookmarks', _pullbookmarks)
- > EOF
- $ cd repo4
- $ hg boo
- bm1 3:b87954705719
- bm3 4:62f4ded848e4
- * bm4 5:92793bfc8cad
- $ cd ../repo3
- $ hg boo
- bm1 3:b87954705719
- bm4 5:92793bfc8cad
- $ hg --config "extensions.failpullbookmarks=$TESTTMP/failpullbookmarks.py" pull $TESTTMP/repo4
- pulling from $TESTTMP/repo4
- searching for changes
- no changes found
- adding remote bookmark bm3
- abort: forced failure by extension
- [255]
- $ hg boo
- bm1 3:b87954705719
- bm4 5:92793bfc8cad
- $ hg pull $TESTTMP/repo4
- pulling from $TESTTMP/repo4
- searching for changes
- no changes found
- adding remote bookmark bm3
- 1 local changesets published
- $ hg boo
- bm1 3:b87954705719
- * bm3 4:62f4ded848e4
- bm4 5:92793bfc8cad
- $ cd ..
-
-verify bookmark behavior after unshare
-
- $ cd repo3
- $ hg unshare
- $ hg boo
- bm1 3:b87954705719
- * bm3 4:62f4ded848e4
- bm4 5:92793bfc8cad
- $ hg boo -d bm4
- $ hg boo bm5
- $ hg boo
- bm1 3:b87954705719
- bm3 4:62f4ded848e4
- * bm5 4:62f4ded848e4
- $ cd ../repo1
- $ hg boo
- * bm1 3:b87954705719
- bm3 4:62f4ded848e4
- bm4 5:92793bfc8cad
- $ cd ..
-
test shared clones using relative paths work
$ mkdir thisdir
--- a/tests/test-shelve.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-shelve.t Mon Jul 22 14:00:33 2019 -0400
@@ -3,7 +3,6 @@
$ cat <<EOF >> $HGRCPATH
> [extensions]
> mq =
- > shelve =
> [defaults]
> diff = --nodates --git
> qnew = --date '0 0'
@@ -65,8 +64,6 @@
To delete specific shelved changes, use "--delete". To delete all shelved
changes, use "--cleanup".
- (use 'hg help -e shelve' to show help for the shelve extension)
-
options ([+] can be repeated):
-A --addremove mark new/missing files as added/removed before
@@ -82,7 +79,7 @@
-n --name NAME use the given name for the shelved commit
-p --patch output patches for changes (provide the names of the
shelved changes as positional arguments)
- -i --interactive interactive mode, only works while creating a shelve
+ -i --interactive interactive mode
--stat output diffstat-style summary of changes (provide
the names of the shelved changes as positional
arguments)
@@ -759,21 +756,24 @@
> EOF
diff --git a/a/a b/a/a
2 hunks, 2 lines changed
- examine changes to 'a/a'? [Ynesfdaq?] y
+ examine changes to 'a/a'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,3 +1,4 @@
+a
a
c
x
- record change 1/2 to 'a/a'? [Ynesfdaq?] y
+ record change 1/2 to 'a/a'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,3 +2,4 @@
a
c
x
+x
- record change 2/2 to 'a/a'? [Ynesfdaq?] n
+ record change 2/2 to 'a/a'?
+ (enter ? for help) [Ynesfdaq?] n
shelved as test
merging a/a
@@ -1153,7 +1153,233 @@
-- trying to pull in the shelve bits
-- unshelve should abort otherwise, it'll eat my second parent.
$ hg unshelve
- abort: cannot unshelve while merging
+ abort: outstanding uncommitted merge
+ (use 'hg commit' or 'hg merge --abort')
[255]
$ cd ..
+
+-- test for interactive mode on unshelve
+
+ $ hg init a
+ $ cd a
+ $ echo > b
+ $ hg ci -Am b
+ adding b
+ $ echo > c
+ $ echo > d
+ $ hg add .
+ adding c
+ adding d
+ $ hg shelve
+ shelved as default
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ echo > e
+ $ hg add e
+ $ hg ci -m e
+ $ hg shelve --patch
+ default (*s ago) changes to: b (glob)
+
+ diff --git a/c b/c
+ new file mode 100644
+ --- /dev/null
+ +++ b/c
+ @@ -0,0 +1,1 @@
+ +
+ diff --git a/d b/d
+ new file mode 100644
+ --- /dev/null
+ +++ b/d
+ @@ -0,0 +1,1 @@
+ +
+ $ hg unshelve -i <<EOF
+ > y
+ > y
+ > y
+ > n
+ > EOF
+ unshelving change 'default'
+ rebasing shelved changes
+ diff --git a/c b/c
+ new file mode 100644
+ examine changes to 'c'?
+ (enter ? for help) [Ynesfdaq?] y
+
+ @@ -0,0 +1,1 @@
+ +
+ record change 1/2 to 'c'?
+ (enter ? for help) [Ynesfdaq?] y
+
+ diff --git a/d b/d
+ new file mode 100644
+ examine changes to 'd'?
+ (enter ? for help) [Ynesfdaq?] y
+
+ @@ -0,0 +1,1 @@
+ +
+ record change 2/2 to 'd'?
+ (enter ? for help) [Ynesfdaq?] n
+
+ $ ls
+ b
+ c
+ e
+-- shelve should not contain `c` now
+ $ hg shelve --patch
+ default (*s ago) changes to: b (glob)
+
+ diff --git a/d b/d
+ new file mode 100644
+ --- /dev/null
+ +++ b/d
+ @@ -0,0 +1,1 @@
+ +
+ $ hg unshelve -i <<EOF
+ > y
+ > y
+ > EOF
+ unshelving change 'default'
+ rebasing shelved changes
+ diff --git a/d b/d
+ new file mode 100644
+ examine changes to 'd'?
+ (enter ? for help) [Ynesfdaq?] y
+
+ @@ -0,0 +1,1 @@
+ +
+ record this change to 'd'?
+ (enter ? for help) [Ynesfdaq?] y
+
+ $ ls
+ b
+ c
+ d
+ e
+ $ hg shelve --list
+
+-- now, unshelve selected changes from a file
+
+ $ echo B > foo
+ $ hg add foo
+ $ hg ci -m 'add B to foo'
+ $ cat > foo <<EOF
+ > A
+ > B
+ > C
+ > EOF
+ $ hg shelve
+ shelved as default
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cat foo
+ B
+ $ hg unshelve -i <<EOF
+ > y
+ > y
+ > n
+ > EOF
+ unshelving change 'default'
+ rebasing shelved changes
+ diff --git a/foo b/foo
+ 2 hunks, 2 lines changed
+ examine changes to 'foo'?
+ (enter ? for help) [Ynesfdaq?] y
+
+ @@ -1,1 +1,2 @@
+ +A
+ B
+ record change 1/2 to 'foo'?
+ (enter ? for help) [Ynesfdaq?] y
+
+ @@ -1,1 +2,2 @@
+ B
+ +C
+ record change 2/2 to 'foo'?
+ (enter ? for help) [Ynesfdaq?] n
+
+ $ cat foo
+ A
+ B
+ $ hg shelve --patch
+ default (*s ago) changes to: add B to foo (glob)
+
+ diff --git a/foo b/foo
+ --- a/foo
+ +++ b/foo
+ @@ -1,2 +1,3 @@
+ A
+ B
+ +C
+
+-- unshelve interactive on conflicts
+
+ $ echo A >> bar1
+ $ echo A >> bar2
+ $ hg add bar1 bar2
+ $ hg ci -m 'add A to bars'
+ $ echo B >> bar1
+ $ echo B >> bar2
+ $ hg shelve
+ shelved as default-01
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo C >> bar1
+ $ echo C >> bar2
+ $ hg ci -m 'add C to bars'
+ $ hg unshelve -i
+ unshelving change 'default-01'
+ rebasing shelved changes
+ merging bar1
+ merging bar2
+ warning: conflicts while merging bar1! (edit, then use 'hg resolve --mark')
+ warning: conflicts while merging bar2! (edit, then use 'hg resolve --mark')
+ unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
+ [1]
+
+ $ cat > bar1 <<EOF
+ > A
+ > B
+ > C
+ > EOF
+ $ cat > bar2 <<EOF
+ > A
+ > B
+ > C
+ > EOF
+ $ hg resolve -m bar1 bar2
+ (no more unresolved files)
+ continue: hg unshelve --continue
+ $ cat bar1
+ A
+ B
+ C
+ $ hg unshelve --continue -i <<EOF
+ > y
+ > y
+ > y
+ > y
+ > EOF
+ unshelving change 'default-01'
+ diff --git a/bar1 b/bar1
+ 1 hunks, 1 lines changed
+ examine changes to 'bar1'?
+ (enter ? for help) [Ynesfdaq?] y
+
+ @@ -1,2 +1,3 @@
+ A
+ +B
+ C
+ record change 1/2 to 'bar1'?
+ (enter ? for help) [Ynesfdaq?] y
+
+ diff --git a/bar2 b/bar2
+ 1 hunks, 1 lines changed
+ examine changes to 'bar2'?
+ (enter ? for help) [Ynesfdaq?] y
+
+ @@ -1,2 +1,3 @@
+ A
+ +B
+ C
+ record change 2/2 to 'bar2'?
+ (enter ? for help) [Ynesfdaq?] y
+
+ unshelve of 'default-01' complete
--- a/tests/test-shelve2.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-shelve2.t Mon Jul 22 14:00:33 2019 -0400
@@ -1,9 +1,10 @@
#testcases stripbased phasebased
+#testcases abortflag abortcommand
+#testcases continueflag continuecommand
$ cat <<EOF >> $HGRCPATH
> [extensions]
> mq =
- > shelve =
> [defaults]
> diff = --nodates --git
> qnew = --date '0 0'
@@ -20,6 +21,20 @@
#endif
+#if abortflag
+ $ cat >> $HGRCPATH <<EOF
+ > [alias]
+ > abort = unshelve --abort
+ > EOF
+#endif
+
+#if continueflag
+ $ cat >> $HGRCPATH <<EOF
+ > [alias]
+ > continue = unshelve --continue
+ > EOF
+#endif
+
shelve should leave dirstate clean (issue4055)
$ hg init shelverebase
@@ -286,7 +301,14 @@
>>>>>>> working-copy: aef214a5229c - shelve: changes to: commit stuff
$ cat f.orig
g
- $ hg unshelve --abort
+
+#if abortcommand
+when in dry-run mode
+ $ hg abort --dry-run
+ unshelve in progress, will be aborted
+#endif
+
+ $ hg abort
unshelve of 'default' aborted
$ hg st
? f.orig
@@ -546,7 +568,7 @@
$ hg resolve --mark a
(no more unresolved files)
continue: hg unshelve --continue
- $ hg unshelve --continue
+ $ hg continue
marked working directory as branch test
unshelve of 'default' complete
$ cat a
@@ -627,7 +649,13 @@
$ hg resolve --mark a
(no more unresolved files)
continue: hg unshelve --continue
- $ hg unshelve --continue
+
+#if continuecommand
+ $ hg continue --dry-run
+ unshelve in progress, will be resumed
+#endif
+
+ $ hg continue
unshelve of 'default' complete
$ cat a
aaabbbccc
@@ -690,21 +718,30 @@
$ echo somethingsomething > .hg/shelvedstate
Unshelve --continue fails with appropriate message if shelvedstate is corrupted
- $ hg unshelve --continue
+ $ hg continue
abort: corrupted shelved state file
(please run hg unshelve --abort to abort unshelve operation)
[255]
Unshelve --abort works with a corrupted shelvedstate
- $ hg unshelve --abort
- could not read shelved state file, your working copy may be in an unexpected state
+ $ hg abort
+ abort: could not read shelved state file, your working copy may be in an unexpected state
please update to some commit
+
+ [255]
Unshelve --abort fails with appropriate message if there's no unshelve in
progress
+
+#if abortflag
$ hg unshelve --abort
abort: no unshelve in progress
[255]
+#else
+ $ hg abort
+ aborting the merge, updating back to 9451eaa6eee3
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+#endif
$ cd ..
Unshelve respects --keep even if user intervention is needed
@@ -728,7 +765,7 @@
$ hg resolve --mark file
(no more unresolved files)
continue: hg unshelve --continue
- $ hg unshelve --continue
+ $ hg continue
unshelve of 'default' complete
$ hg shelve --list
default (*s ago) * changes to: 1 (glob)
@@ -799,7 +836,7 @@
(no more unresolved files)
continue: hg unshelve --continue
mercurial does not crash
- $ hg unshelve --continue
+ $ hg continue
unshelve of 'ashelve' complete
#if phasebased
@@ -823,7 +860,7 @@
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
[1]
- $ hg unshelve --abort
+ $ hg abort
unshelve of 'default' aborted
Unshelve without .shelve metadata (can happen when upgrading a repository with old shelve)
@@ -842,9 +879,44 @@
[1]
$ cat .hg/shelved/default.shelve
node=82e0cb9893247d12667017593ce1e5655860f1ac
- $ hg unshelve --abort
+ $ hg abort
unshelve of 'default' aborted
#endif
$ cd ..
+
+Block merge abort when unshelve in progress (issue6160)
+-------------------------------------------------------
+
+ $ hg init a
+ $ cd a
+ $ echo foo > a ; hg commit -qAm "initial commit"
+ $ echo bar > a
+ $ hg shelve
+ shelved as default
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo foobar > a
+ $ hg unshelve
+ unshelving change 'default'
+ temporarily committing pending changes (restore with 'hg unshelve --abort')
+ rebasing shelved changes
+ merging a
+ warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+ unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
+ [1]
+
+ $ hg log --template '{desc|firstline} {author} {date|isodate} \n' -r .
+ pending changes temporary commit shelve@localhost 1970-01-01 00:00 +0000
+ $ hg merge --abort
+ abort: cannot abort merge with unshelve in progress
+ (use 'hg unshelve --continue' or 'hg unshelve --abort')
+ [255]
+
+ $ hg unshelve --abort
+ unshelve of 'default' aborted
+
+ $ hg log -G --template '{desc|firstline} {author} {date|isodate} \n' -r .
+ @ initial commit test 1970-01-01 00:00 +0000
+
+ $ cd ..
--- a/tests/test-simple-update.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-simple-update.t Mon Jul 22 14:00:33 2019 -0400
@@ -109,6 +109,7 @@
$ hg update -v | grep 100
getting 100
100 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg status
$ cd ..
--- a/tests/test-sparse-merges.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-sparse-merges.t Mon Jul 22 14:00:33 2019 -0400
@@ -114,8 +114,8 @@
$ hg merge
temporarily included 1 file(s) in the sparse checkout for merging
file 'd' was deleted in other [merge rev] but was modified in local [working copy].
- What do you want to do?
- use (c)hanged version, (d)elete, or leave (u)nresolved? u
+ You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+ What do you want to do? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
--- a/tests/test-split.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-split.t Mon Jul 22 14:00:33 2019 -0400
@@ -135,22 +135,26 @@
$ HGEDITOR=false runsplit
diff --git a/a b/a
3 hunks, 3 lines changed
- examine changes to 'a'? [Ynesfdaq?] y
+ examine changes to 'a'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,1 +1,1 @@
-1
+11
- record change 1/3 to 'a'? [Ynesfdaq?] n
+ record change 1/3 to 'a'?
+ (enter ? for help) [Ynesfdaq?] n
@@ -3,1 +3,1 @@ 2
-3
+33
- record change 2/3 to 'a'? [Ynesfdaq?] n
+ record change 2/3 to 'a'?
+ (enter ? for help) [Ynesfdaq?] n
@@ -5,1 +5,1 @@ 4
-5
+55
- record change 3/3 to 'a'? [Ynesfdaq?] y
+ record change 3/3 to 'a'?
+ (enter ? for help) [Ynesfdaq?] y
transaction abort!
rollback completed
@@ -162,22 +166,26 @@
$ runsplit
diff --git a/a b/a
3 hunks, 3 lines changed
- examine changes to 'a'? [Ynesfdaq?] y
+ examine changes to 'a'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,1 +1,1 @@
-1
+11
- record change 1/3 to 'a'? [Ynesfdaq?] n
+ record change 1/3 to 'a'?
+ (enter ? for help) [Ynesfdaq?] n
@@ -3,1 +3,1 @@ 2
-3
+33
- record change 2/3 to 'a'? [Ynesfdaq?] n
+ record change 2/3 to 'a'?
+ (enter ? for help) [Ynesfdaq?] n
@@ -5,1 +5,1 @@ 4
-5
+55
- record change 3/3 to 'a'? [Ynesfdaq?] y
+ record change 3/3 to 'a'?
+ (enter ? for help) [Ynesfdaq?] y
EDITOR: HG: Splitting 1df0d5c5a3ab. Write commit message for the first split changeset.
EDITOR: a2
@@ -192,17 +200,20 @@
created new head
diff --git a/a b/a
2 hunks, 2 lines changed
- examine changes to 'a'? [Ynesfdaq?] y
+ examine changes to 'a'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,1 +1,1 @@
-1
+11
- record change 1/2 to 'a'? [Ynesfdaq?] n
+ record change 1/2 to 'a'?
+ (enter ? for help) [Ynesfdaq?] n
@@ -3,1 +3,1 @@ 2
-3
+33
- record change 2/2 to 'a'? [Ynesfdaq?] y
+ record change 2/2 to 'a'?
+ (enter ? for help) [Ynesfdaq?] y
EDITOR: HG: Splitting 1df0d5c5a3ab. So far it has been split into:
EDITOR: HG: - e704349bd21b: split 1
@@ -218,12 +229,14 @@
EDITOR: HG: changed a
diff --git a/a b/a
1 hunks, 1 lines changed
- examine changes to 'a'? [Ynesfdaq?] y
+ examine changes to 'a'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -1,1 +1,1 @@
-1
+11
- record this change to 'a'? [Ynesfdaq?] y
+ record this change to 'a'?
+ (enter ? for help) [Ynesfdaq?] y
EDITOR: HG: Splitting 1df0d5c5a3ab. So far it has been split into:
EDITOR: HG: - e704349bd21b: split 1
@@ -515,12 +528,14 @@
> EOF
diff --git a/B b/B
new file mode 100644
- examine changes to 'B'? [Ynesfdaq?] y
+ examine changes to 'B'?
+ (enter ? for help) [Ynesfdaq?] y
@@ -0,0 +1,1 @@
+B
\ No newline at end of file
- record this change to 'B'? [Ynesfdaq?] y
+ record this change to 'B'?
+ (enter ? for help) [Ynesfdaq?] y
EDITOR: HG: Splitting 112478962961. Write commit message for the first split changeset.
EDITOR: B
@@ -621,11 +636,13 @@
$ printf 'f\nn\nf\n' | hg --config extensions.split= --config diff.ignoreblanklines=1 split
diff --git a/bar b/bar
2 hunks, 2 lines changed
- examine changes to 'bar'? [Ynesfdaq?] f
+ examine changes to 'bar'?
+ (enter ? for help) [Ynesfdaq?] f
diff --git a/foo b/foo
1 hunks, 1 lines changed
- examine changes to 'foo'? [Ynesfdaq?] n
+ examine changes to 'foo'?
+ (enter ? for help) [Ynesfdaq?] n
EDITOR: HG: Splitting dd3c45017cbf. Write commit message for the first split changeset.
EDITOR: splitme
@@ -640,7 +657,8 @@
created new head
diff --git a/foo b/foo
1 hunks, 1 lines changed
- examine changes to 'foo'? [Ynesfdaq?] f
+ examine changes to 'foo'?
+ (enter ? for help) [Ynesfdaq?] f
EDITOR: HG: Splitting dd3c45017cbf. So far it has been split into:
EDITOR: HG: - f205aea1c624: split 1
@@ -675,11 +693,13 @@
$ printf 'f\nn\nf\n' | hg --config extensions.split= --config diff.ignoreblanklines=1 split
diff --git a/bar b/bar
1 hunks, 1 lines changed
- examine changes to 'bar'? [Ynesfdaq?] f
+ examine changes to 'bar'?
+ (enter ? for help) [Ynesfdaq?] f
diff --git a/foo b/foo
2 hunks, 2 lines changed
- examine changes to 'foo'? [Ynesfdaq?] n
+ examine changes to 'foo'?
+ (enter ? for help) [Ynesfdaq?] n
EDITOR: HG: Splitting 904c80b40a4a. Write commit message for the first split changeset.
EDITOR: splitme
@@ -694,7 +714,8 @@
created new head
diff --git a/foo b/foo
2 hunks, 2 lines changed
- examine changes to 'foo'? [Ynesfdaq?] f
+ examine changes to 'foo'?
+ (enter ? for help) [Ynesfdaq?] f
EDITOR: HG: Splitting 904c80b40a4a. So far it has been split into:
EDITOR: HG: - ffecf40fa954: split 1
@@ -739,7 +760,8 @@
diff --git a/foo b/foo
old mode 100644
new mode 100755
- examine changes to 'foo'? [Ynesfdaq?] y
+ examine changes to 'foo'?
+ (enter ? for help) [Ynesfdaq?] y
EDITOR: HG: Splitting 3a2125f0f4cb. Write commit message for the first split changeset.
EDITOR: make executable
--- a/tests/test-status.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-status.t Mon Jul 22 14:00:33 2019 -0400
@@ -630,6 +630,16 @@
b
R b
+ $ hg log -GTstatus -r 'wdir()' -C
+ o changeset: 2147483647:ffffffffffff
+ | parent: 0:8c55c58b4c0e
+ ~ user: test
+ date: * (glob)
+ files:
+ M a
+ b
+ R b
+
Other "bug" highlight, the revision status does not report the copy information.
This is buggy behavior.
--- a/tests/test-strip.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-strip.t Mon Jul 22 14:00:33 2019 -0400
@@ -272,7 +272,13 @@
date: Thu Jan 01 00:00:00 1970 +0000
summary: c
+##strip not allowed with merge in progress
$ hg strip 4
+ abort: outstanding uncommitted merge
+ (use 'hg commit' or 'hg merge --abort')
+ [255]
+##strip allowed --force with merge in progress
+ $ hg strip 4 --force
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob)
@@ -546,7 +552,7 @@
$ echo c > b
$ hg strip tip
- abort: local changes found
+ abort: uncommitted changes
[255]
$ hg strip tip --keep
saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob)
@@ -698,7 +704,7 @@
$ echo a > a
$ hg add a
$ hg strip -B B
- abort: local changes found
+ abort: uncommitted changes
[255]
$ hg bookmarks
* B 6:ff43616e5d0f
@@ -855,7 +861,7 @@
bundle2-output-part: "phase-heads" 24 bytes payload
saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/6625a5168474-345bb43d-backup.hg
updating the branch cache
- invalid branchheads cache (served): tip differs
+ invalid branch cache (served): tip differs
$ hg log -G
o changeset: 2:5c51d8d6557d
| tag: tip
--- a/tests/test-subrepo-git.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-subrepo-git.t Mon Jul 22 14:00:33 2019 -0400
@@ -213,7 +213,8 @@
(run 'hg heads' to see heads, 'hg merge' to merge)
$ hg merge 2>/dev/null
subrepository s diverged (local revision: 7969594, remote revision: aa84837)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [merge rev]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [merge rev].
+ what do you want to do? m
pulling subrepo s from $TESTTMP/gitroot
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -551,9 +552,11 @@
$ cd ..
$ hg update 4
subrepository s diverged (local revision: da5f5b1, remote revision: aa84837)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [destination].
+ what do you want to do? m
subrepository sources for s differ
- use (l)ocal source (da5f5b1) or (r)emote source (aa84837)? l
+ you can use (l)ocal source (da5f5b1) or (r)emote source (aa84837).
+ what do you want to do? l
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg id -n
4+
@@ -577,9 +580,11 @@
$ cd ..
$ hg update 1
subrepository s diverged (local revision: 32a3438, remote revision: da5f5b1)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [destination].
+ what do you want to do? m
subrepository sources for s differ (in checked out version)
- use (l)ocal source (32a3438) or (r)emote source (da5f5b1)? l
+ you can use (l)ocal source (32a3438) or (r)emote source (da5f5b1).
+ what do you want to do? l
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg id -n
1+
@@ -599,9 +604,11 @@
1+
$ hg update 7
subrepository s diverged (local revision: 32a3438, remote revision: 32a3438)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [destination].
+ what do you want to do? m
subrepository sources for s differ
- use (l)ocal source (32a3438) or (r)emote source (32a3438)? l
+ you can use (l)ocal source (32a3438) or (r)emote source (32a3438).
+ what do you want to do? l
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg id -n
7+
--- a/tests/test-subrepo-svn.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-subrepo-svn.t Mon Jul 22 14:00:33 2019 -0400
@@ -321,9 +321,11 @@
$ cd ..
$ hg update tip
subrepository s diverged (local revision: 2, remote revision: 3)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [destination].
+ what do you want to do? m
subrepository sources for s differ
- use (l)ocal source (2) or (r)emote source (3)? l
+ you can use (l)ocal source (2) or (r)emote source (3).
+ what do you want to do? l
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg id -n
2+
@@ -352,9 +354,11 @@
$ cd ..
$ hg update 1
subrepository s diverged (local revision: 3, remote revision: 2)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [destination].
+ what do you want to do? m
subrepository sources for s differ (in checked out version)
- use (l)ocal source (1) or (r)emote source (2)? l
+ you can use (l)ocal source (1) or (r)emote source (2).
+ what do you want to do? l
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg id -n
1+
@@ -375,9 +379,11 @@
1+
$ hg update tip
subrepository s diverged (local revision: 3, remote revision: 3)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [destination].
+ what do you want to do? m
subrepository sources for s differ
- use (l)ocal source (1) or (r)emote source (3)? l
+ you can use (l)ocal source (1) or (r)emote source (3).
+ what do you want to do? l
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg id -n
2+
@@ -409,7 +415,8 @@
$ cd ..
$ hg update 1
subrepository s diverged (local revision: 3, remote revision: 2)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [destination].
+ what do you want to do? m
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg id -n
1+
--- a/tests/test-subrepo.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-subrepo.t Mon Jul 22 14:00:33 2019 -0400
@@ -274,7 +274,6 @@
$ hg ci -m9
created new head
$ hg merge 6 --debug # test change
- searching for copies back to rev 2
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: 1f14a2e2d3ec, local: f0d2028bf86d+, remote: 1831e14459c4
@@ -301,7 +300,6 @@
$ hg ci -m10
committing subrepository t
$ HGMERGE=internal:merge hg merge --debug 7 # test conflict
- searching for copies back to rev 2
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: 1831e14459c4, local: e45c8b14af55+, remote: f94576341bcf
@@ -311,9 +309,9 @@
subrepo t: both sides changed
subrepository t diverged (local revision: 20a0db6fbf6c, remote revision: 7af322bc1198)
starting 4 threads for background file closing (?)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [merge rev]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [merge rev].
+ what do you want to do? m
merging subrepository "t"
- searching for copies back to rev 2
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: 6747d179aa9a, local: 20a0db6fbf6c+, remote: 7af322bc1198
@@ -911,7 +909,8 @@
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg merge 4 # try to merge default into br again
subrepository s diverged (local revision: f8f13b33206e, remote revision: a3f9062a4f88)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [merge rev]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [merge rev].
+ what do you want to do? m
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
$ cd ..
@@ -1202,7 +1201,8 @@
added 1 changesets with 2 changes to 2 files
new changesets c82b79fdcc5b
subrepository sub/repo diverged (local revision: f42d5c7504a8, remote revision: 46cd4aac504c)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [destination].
+ what do you want to do? m
pulling subrepo sub/repo from $TESTTMP/issue1852a/sub/repo
searching for changes
adding changesets
@@ -1211,7 +1211,8 @@
added 1 changesets with 1 changes to 1 files
new changesets 46cd4aac504c
subrepository sources for sub/repo differ
- use (l)ocal source (f42d5c7504a8) or (r)emote source (46cd4aac504c)? l
+ you can use (l)ocal source (f42d5c7504a8) or (r)emote source (46cd4aac504c).
+ what do you want to do? l
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cat issue1852d/.hgsubstate
f42d5c7504a811dda50f5cf3e5e16c3330b87172 sub/repo
@@ -1338,13 +1339,17 @@
e95bcfa18a35+
$ hg update tip
subrepository s diverged (local revision: fc627a69481f, remote revision: 12a213df6fa9)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [destination].
+ what do you want to do? m
subrepository sources for s differ
- use (l)ocal source (fc627a69481f) or (r)emote source (12a213df6fa9)? l
+ you can use (l)ocal source (fc627a69481f) or (r)emote source (12a213df6fa9).
+ what do you want to do? l
subrepository t diverged (local revision: e95bcfa18a35, remote revision: 52c0adc0515a)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [destination].
+ what do you want to do? m
subrepository sources for t differ
- use (l)ocal source (e95bcfa18a35) or (r)emote source (52c0adc0515a)? l
+ you can use (l)ocal source (e95bcfa18a35) or (r)emote source (52c0adc0515a).
+ what do you want to do? l
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg id
925c17564ef8+ tip
@@ -1371,11 +1376,14 @@
$ cd ..
$ hg update 10
subrepository s diverged (local revision: 12a213df6fa9, remote revision: fc627a69481f)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [destination].
+ what do you want to do? m
subrepository t diverged (local revision: 52c0adc0515a, remote revision: 20a0db6fbf6c)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [destination].
+ what do you want to do? m
subrepository sources for t differ (in checked out version)
- use (l)ocal source (7af322bc1198) or (r)emote source (20a0db6fbf6c)? l
+ you can use (l)ocal source (7af322bc1198) or (r)emote source (20a0db6fbf6c).
+ what do you want to do? l
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg id
e45c8b14af55+
@@ -1397,13 +1405,17 @@
7af322bc1198+
$ hg update tip
subrepository s diverged (local revision: 12a213df6fa9, remote revision: 12a213df6fa9)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [destination].
+ what do you want to do? m
subrepository sources for s differ
- use (l)ocal source (02dcf1d70411) or (r)emote source (12a213df6fa9)? l
+ you can use (l)ocal source (02dcf1d70411) or (r)emote source (12a213df6fa9).
+ what do you want to do? l
subrepository t diverged (local revision: 52c0adc0515a, remote revision: 52c0adc0515a)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [destination].
+ what do you want to do? m
subrepository sources for t differ
- use (l)ocal source (7af322bc1198) or (r)emote source (52c0adc0515a)? l
+ you can use (l)ocal source (7af322bc1198) or (r)emote source (52c0adc0515a).
+ what do you want to do? l
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg id
925c17564ef8+ tip
@@ -1429,7 +1441,8 @@
$ cd ..
$ hg update 11
subrepository s diverged (local revision: 12a213df6fa9, remote revision: fc627a69481f)
- (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
+ you can (m)erge, keep (l)ocal [working copy] or keep (r)emote [destination].
+ what do you want to do? m
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg id -n
--- a/tests/test-tags.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-tags.t Mon Jul 22 14:00:33 2019 -0400
@@ -145,7 +145,7 @@
$ hg blackbox -l 6
1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify
1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> writing 48 bytes to cache/hgtagsfnodes1
- 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> 0/1 cache hits/lookups in * seconds (glob)
+ 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> 0/2 cache hits/lookups in * seconds (glob)
1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> writing .hg/cache/tags2-visible with 1 tags
1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify exited 0 after * seconds (glob)
1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> blackbox -l 6
@@ -159,7 +159,7 @@
$ hg blackbox -l 6
1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify
1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> not writing .hg/cache/hgtagsfnodes1 because lock cannot be acquired
- 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> 0/1 cache hits/lookups in * seconds (glob)
+ 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> 0/2 cache hits/lookups in * seconds (glob)
1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> writing .hg/cache/tags2-visible with 1 tags
1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify exited 0 after * seconds (glob)
1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> blackbox -l 6
@@ -363,7 +363,7 @@
$ hg blackbox -l 6
1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> tags
1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> writing 24 bytes to cache/hgtagsfnodes1
- 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> 2/3 cache hits/lookups in * seconds (glob)
+ 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> 3/4 cache hits/lookups in * seconds (glob)
1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> writing .hg/cache/tags2-visible with 1 tags
1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> tags exited 0 after * seconds (glob)
1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> blackbox -l 6
@@ -384,7 +384,7 @@
$ hg blackbox -l 6
1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags
1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> couldn't write cache/hgtagsfnodes1: [Errno *] * (glob)
- 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 2/3 cache hits/lookups in * seconds (glob)
+ 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 3/4 cache hits/lookups in * seconds (glob)
1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing .hg/cache/tags2-visible with 1 tags
1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags exited 0 after * seconds (glob)
1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> blackbox -l 6
@@ -399,7 +399,7 @@
$ hg blackbox -l 6
1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags
1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing 24 bytes to cache/hgtagsfnodes1
- 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 2/3 cache hits/lookups in * seconds (glob)
+ 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 3/4 cache hits/lookups in * seconds (glob)
1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing .hg/cache/tags2-visible with 1 tags
1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags exited 0 after * seconds (glob)
1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> blackbox -l 6
@@ -427,7 +427,7 @@
$ hg blackbox -l 5
1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> writing 24 bytes to cache/hgtagsfnodes1
- 1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> 2/3 cache hits/lookups in * seconds (glob)
+ 1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> 2/4 cache hits/lookups in * seconds (glob)
1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> writing .hg/cache/tags2-visible with 1 tags
1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> tags exited 0 after * seconds (glob)
1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> blackbox -l 5
@@ -445,7 +445,7 @@
$ hg blackbox -l 6
1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> tags
1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> writing 24 bytes to cache/hgtagsfnodes1
- 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> 2/3 cache hits/lookups in * seconds (glob)
+ 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> 3/4 cache hits/lookups in * seconds (glob)
1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> writing .hg/cache/tags2-visible with 1 tags
1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> tags exited 0 after * seconds (glob)
1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> blackbox -l 6
--- a/tests/test-template-keywords.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-template-keywords.t Mon Jul 22 14:00:33 2019 -0400
@@ -798,6 +798,23 @@
fourth
third
+Test files lists on merge commit:
+
+ $ hg co '.^' -q
+ $ touch c
+ $ hg add c
+ $ hg ci -qm 'add file'
+ $ hg merge 10 -q
+ $ hg ci -m 'merge'
+ $ hg log -l1 -T '{files}\n'
+
+ $ hg log -l1 -T '{file_mods}\n'
+
+ $ hg log -l1 -T '{file_adds}\n'
+
+ $ hg log -l1 -T '{file_dels}\n'
+
+
Test file copies dict:
$ hg log -r8 -T '{join(file_copies, " ")}\n'
@@ -818,7 +835,7 @@
Test file attributes:
- $ hg log -l1 -T '{files % "{status} {pad(size, 3, left=True)} {path}\n"}'
+ $ hg log -r10 -T '{files % "{status} {pad(size, 3, left=True)} {path}\n"}'
R a
A 0 b
A 7 fifth
@@ -834,7 +851,7 @@
Test index keyword:
- $ hg log -l 2 -T '{index + 10}{files % " {index}:{file}"}\n'
+ $ hg log -r 10:9 -T '{index + 10}{files % " {index}:{file}"}\n'
10 0:a 1:b 2:fifth 3:fourth 4:third
11 0:a
--- a/tests/test-transplant.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-transplant.t Mon Jul 22 14:00:33 2019 -0400
@@ -40,6 +40,7 @@
(branch merge, don't forget to commit)
$ hg transplant 1
abort: outstanding uncommitted merge
+ (use 'hg commit' or 'hg merge --abort')
[255]
$ hg up -qC tip
$ echo b0 > b1
@@ -461,7 +462,7 @@
baz
foo
-test multiple revisions and --continue
+test multiple revisions, --continue and hg status --verbose
$ hg up -qC 0
$ echo bazbaz > baz
@@ -481,6 +482,15 @@
abort: transplant in progress
(use 'hg transplant --continue' or 'hg update' to abort)
[255]
+ $ hg status -v
+ A bar
+ ? baz.rej
+ ? foo.rej
+ # The repository is in an unfinished *transplant* state.
+
+ # To continue: hg transplant --continue
+ # To abort: hg update
+
$ echo fixed > baz
$ hg transplant --continue
9d6d6b5a8275 transplanted as d80c49962290
--- a/tests/test-treemanifest.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-treemanifest.t Mon Jul 22 14:00:33 2019 -0400
@@ -357,10 +357,10 @@
Shelving and unshelving should work
$ echo foo >> dir1/a
- $ hg --config extensions.shelve= shelve
+ $ hg shelve
shelved as default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ hg --config extensions.shelve= unshelve
+ $ hg unshelve
unshelving change 'default'
$ hg diff --nodates
diff -r 708a273da119 dir1/a
--- a/tests/test-up-local-change.t Tue Jul 09 10:07:35 2019 -0400
+++ b/tests/test-up-local-change.t Mon Jul 22 14:00:33 2019 -0400
@@ -40,7 +40,6 @@
summary: 1
$ hg --debug up
- searching for copies back to rev 1
unmatched files in other:
b
resolving manifests
@@ -68,9 +67,6 @@
$ hg --debug up 0
starting 4 threads for background file closing (?)
- searching for copies back to rev 0
- unmatched files in local (from topological common ancestor):
- b
resolving manifests
branchmerge: False, force: False, partial: False
ancestor: 1e71731e6fbb, local: 1e71731e6fbb+, remote: c19d34741b0a
@@ -95,7 +91,6 @@
summary: 1
$ hg --debug up
- searching for copies back to rev 1
unmatched files in other:
b
resolving manifests