# HG changeset patch
# User Augie Fackler
# Date 1618930866 14400
# Node ID f67b8946bb1b6cfa8328dbf8d6a9128b69ccdcb4
# Parent bc268ea9f9843d65586186c0c735001510dd1daf# Parent 5fa019ceb49950dccd1bb28dc4a16f657a083e4c
merge: default into stable for 5.8 rc
diff -r bc268ea9f984 -r f67b8946bb1b Makefile
--- a/Makefile Thu Mar 25 19:06:28 2021 -0400
+++ b/Makefile Tue Apr 20 11:01:06 2021 -0400
@@ -68,6 +68,12 @@
build:
$(PYTHON) setup.py $(PURE) build $(COMPILERFLAG)
+build-chg:
+ make -C contrib/chg
+
+build-rhg:
+ (cd rust/rhg; cargo build --release)
+
wheel:
FORCE_SETUPTOOLS=1 $(PYTHON) setup.py $(PURE) bdist_wheel $(COMPILERFLAG)
@@ -96,6 +102,9 @@
install-bin: build
$(PYTHON) setup.py $(PURE) install --root="$(DESTDIR)/" --prefix="$(PREFIX)" --force
+install-chg: build-chg
+ make -C contrib/chg install PREFIX="$(PREFIX)"
+
install-doc: doc
cd doc && $(MAKE) $(MFLAGS) install
@@ -107,6 +116,9 @@
install-home-doc: doc
cd doc && $(MAKE) $(MFLAGS) PREFIX="$(HOME)" install
+install-rhg: build-rhg
+ install -m 755 rust/target/release/rhg "$(PREFIX)"/bin/
+
MANIFEST-doc:
$(MAKE) -C doc MANIFEST
@@ -175,7 +187,7 @@
$(PYFILESCMD) | xargs \
xgettext --package-name "Mercurial" \
--msgid-bugs-address "" \
- --copyright-holder "Matt Mackall and others" \
+ --copyright-holder "Olivia Mackall and others" \
--from-code ISO-8859-1 --join --sort-by-file --add-comments=i18n: \
-d hg -p i18n -o hg.pot.tmp
$(PYTHON) i18n/posplit i18n/hg.pot.tmp
diff -r bc268ea9f984 -r f67b8946bb1b README.rst
--- a/README.rst Thu Mar 25 19:06:28 2021 -0400
+++ b/README.rst Tue Apr 20 11:01:06 2021 -0400
@@ -18,3 +18,13 @@
See https://mercurial-scm.org/ for detailed installation
instructions, platform-specific notes, and Mercurial user information.
+
+Notes for packagers
+===================
+
+Mercurial ships a copy of the python-zstandard sources. This is used to
+provide support for zstd compression and decompression functionality. The
+module is not intended to be replaced by the plain python-zstandard nor
+is it intended to use a system zstd library. Patches can result in hard
+to diagnose errors and are explicitly discouraged as unsupported
+configuration.
diff -r bc268ea9f984 -r f67b8946bb1b black.toml
--- a/black.toml Thu Mar 25 19:06:28 2021 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,14 +0,0 @@
-[tool.black]
-line-length = 80
-exclude = '''
-build/
-| wheelhouse/
-| dist/
-| packages/
-| \.hg/
-| \.mypy_cache/
-| \.venv/
-| mercurial/thirdparty/
-'''
-skip-string-normalization = true
-quiet = true
diff -r bc268ea9f984 -r f67b8946bb1b contrib/all-revsets.txt
--- a/contrib/all-revsets.txt Thu Mar 25 19:06:28 2021 -0400
+++ b/contrib/all-revsets.txt Tue Apr 20 11:01:06 2021 -0400
@@ -46,8 +46,8 @@
# Used in revision c1546d7400ef
min(0::)
# Used in revision 546fa6576815
-author(lmoscovicz) or author(mpm)
-author(mpm) or author(lmoscovicz)
+author(lmoscovicz) or author(olivia)
+author(olivia) or author(lmoscovicz)
# Used in revision 9bfe68357c01
public() and id("d82e2223f132")
# Used in revision ba89f7b542c9
@@ -100,7 +100,7 @@
draft() and ::tip
::tip and draft()
author(lmoscovicz)
-author(mpm)
+author(olivia)
::p1(p1(tip))::
public()
:10000 and public()
@@ -130,7 +130,7 @@
head()
head() - public()
draft() and head()
-head() and author("mpm")
+head() and author("olivia")
# testing the mutable phases set
draft()
diff -r bc268ea9f984 -r f67b8946bb1b contrib/base-revsets.txt
--- a/contrib/base-revsets.txt Thu Mar 25 19:06:28 2021 -0400
+++ b/contrib/base-revsets.txt Tue Apr 20 11:01:06 2021 -0400
@@ -25,9 +25,9 @@
0::tip
roots(0::tip)
author(lmoscovicz)
-author(mpm)
-author(lmoscovicz) or author(mpm)
-author(mpm) or author(lmoscovicz)
+author(olivia)
+author(lmoscovicz) or author(olivia)
+author(olivia) or author(lmoscovicz)
tip:0
0::
# those two `roots(...)` inputs are close to what phase movement use.
diff -r bc268ea9f984 -r f67b8946bb1b contrib/check-code.py
--- a/contrib/check-code.py Thu Mar 25 19:06:28 2021 -0400
+++ b/contrib/check-code.py Tue Apr 20 11:01:06 2021 -0400
@@ -2,7 +2,7 @@
#
# check-code - a style and portability checker for Mercurial
#
-# Copyright 2010 Matt Mackall
+# Copyright 2010 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b contrib/check-commit
--- a/contrib/check-commit Thu Mar 25 19:06:28 2021 -0400
+++ b/contrib/check-commit Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
#
-# Copyright 2014 Matt Mackall
+# Copyright 2014 Olivia Mackall
#
# A tool/hook to run basic sanity checks on commits/patches for
# submission to Mercurial. Install by adding the following to your
diff -r bc268ea9f984 -r f67b8946bb1b contrib/check-config.py
--- a/contrib/check-config.py Thu Mar 25 19:06:28 2021 -0400
+++ b/contrib/check-config.py Tue Apr 20 11:01:06 2021 -0400
@@ -2,7 +2,7 @@
#
# check-config - a config flag documentation checker for Mercurial
#
-# Copyright 2015 Matt Mackall
+# Copyright 2015 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b contrib/chg/chg.1
--- a/contrib/chg/chg.1 Thu Mar 25 19:06:28 2021 -0400
+++ b/contrib/chg/chg.1 Tue Apr 20 11:01:06 2021 -0400
@@ -36,6 +36,6 @@
.B \-\-kill\-chg\-daemon
Terminate the background command servers.
.SH SEE ALSO
-.BR hg (1),
+.BR hg (1)
.SH AUTHOR
Written by Yuya Nishihara .
diff -r bc268ea9f984 -r f67b8946bb1b contrib/clang-format-ignorelist
--- a/contrib/clang-format-ignorelist Thu Mar 25 19:06:28 2021 -0400
+++ b/contrib/clang-format-ignorelist Tue Apr 20 11:01:06 2021 -0400
@@ -9,3 +9,4 @@
hgext/fsmonitor/pywatchman/**.c
mercurial/thirdparty/**.c
mercurial/thirdparty/**.h
+mercurial/pythoncapi_compat.h
diff -r bc268ea9f984 -r f67b8946bb1b contrib/examples/fix.hgrc
--- a/contrib/examples/fix.hgrc Thu Mar 25 19:06:28 2021 -0400
+++ b/contrib/examples/fix.hgrc Tue Apr 20 11:01:06 2021 -0400
@@ -5,7 +5,7 @@
rustfmt:command = rustfmt +nightly
rustfmt:pattern = set:"**.rs" - "mercurial/thirdparty/**"
-black:command = black --config=black.toml -
+black:command = black --config=pyproject.toml -
black:pattern = set:**.py - mercurial/thirdparty/**
# Mercurial doesn't have any Go code, but if we did this is how we
diff -r bc268ea9f984 -r f67b8946bb1b contrib/fuzz/Makefile
--- a/contrib/fuzz/Makefile Thu Mar 25 19:06:28 2021 -0400
+++ b/contrib/fuzz/Makefile Tue Apr 20 11:01:06 2021 -0400
@@ -1,5 +1,5 @@
-CC = clang
-CXX = clang++
+CC ?= clang
+CXX ?= clang++
# By default, use our own standalone_fuzz_target_runner.
# This runner does no fuzzing, but simply executes the inputs
@@ -10,6 +10,15 @@
# OSS-Fuzz will define its own value for LIB_FUZZING_ENGINE.
LIB_FUZZING_ENGINE ?= standalone_fuzz_target_runner.o
+# Default to Python 3.
+#
+# Windows ships Python 3 as `python.exe`, which may not be on PATH. py.exe is.
+ifeq ($(OS),Windows_NT)
+PYTHON?=py -3
+else
+PYTHON?=python3
+endif
+
PYTHON_CONFIG ?= $$OUT/sanpy/bin/python-config
PYTHON_CONFIG_FLAGS ?= --ldflags --embed
@@ -20,7 +29,7 @@
standalone_fuzz_target_runner.o: standalone_fuzz_target_runner.cc
$$OUT/%_fuzzer_seed_corpus.zip: %_corpus.py
- python $< $@
+ $(PYTHON) $< $@
pyutil.o: pyutil.cc pyutil.h
$(CXX) $(CXXFLAGS) -g -O1 \
diff -r bc268ea9f984 -r f67b8946bb1b contrib/heptapod-ci.yml
--- a/contrib/heptapod-ci.yml Thu Mar 25 19:06:28 2021 -0400
+++ b/contrib/heptapod-ci.yml Tue Apr 20 11:01:06 2021 -0400
@@ -7,6 +7,8 @@
variables:
PYTHON: python
TEST_HGMODULEPOLICY: "allow"
+ HG_CI_IMAGE_TAG: "latest"
+ TEST_HGTESTS_ALLOW_NETIO: "0"
.runtests_template: &runtests
stage: tests
@@ -17,21 +19,12 @@
- hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
- cd /tmp/mercurial-ci/
- ls -1 tests/test-check-*.* > /tmp/check-tests.txt
+ - black --version
+ - clang-format --version
script:
- echo "python used, $PYTHON"
- echo "$RUNTEST_ARGS"
- - HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS
-
-
-.rust_template: &rust
- before_script:
- - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no
- - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
- - ls -1 tests/test-check-*.* > /tmp/check-tests.txt
- - cd /tmp/mercurial-ci/rust/rhg
- - cargo build
- - cd /tmp/mercurial-ci/
-
+ - HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS
checks-py2:
<<: *runtests
@@ -58,14 +51,23 @@
phabricator-refresh:
stage: phabricator
+ variables:
+ DEFAULT_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)"
+ STABLE_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)\n⚠ This patch is intended for stable ⚠\n{image https://media.giphy.com/media/nYI8SmmChYXK0/source.gif}"
script:
- - "./contrib/phab-refresh-stack.sh --comment \":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)\""
+ - |
+ if [ `hg branch` == "stable" ]; then
+ ./contrib/phab-refresh-stack.sh --comment "$STABLE_COMMENT";
+ else
+ ./contrib/phab-refresh-stack.sh --comment "$DEFAULT_COMMENT";
+ fi
test-py2:
<<: *runtests
variables:
RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
TEST_HGMODULEPOLICY: "c"
+ TEST_HGTESTS_ALLOW_NETIO: "1"
test-py3:
<<: *runtests
@@ -73,6 +75,7 @@
RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
PYTHON: python3
TEST_HGMODULEPOLICY: "c"
+ TEST_HGTESTS_ALLOW_NETIO: "1"
test-py2-pure:
<<: *runtests
@@ -89,7 +92,6 @@
test-py2-rust:
<<: *runtests
- <<: *rust
variables:
HGWITHRUSTEXT: cpython
RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt"
@@ -97,13 +99,20 @@
test-py3-rust:
<<: *runtests
- <<: *rust
variables:
HGWITHRUSTEXT: cpython
RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt"
PYTHON: python3
TEST_HGMODULEPOLICY: "rust+c"
+test-py3-rhg:
+ <<: *runtests
+ variables:
+ HGWITHRUSTEXT: cpython
+ RUNTEST_ARGS: "--rust --rhg --blacklist /tmp/check-tests.txt"
+ PYTHON: python3
+ TEST_HGMODULEPOLICY: "rust+c"
+
test-py2-chg:
<<: *runtests
variables:
diff -r bc268ea9f984 -r f67b8946bb1b contrib/hg-test-mode.el
--- a/contrib/hg-test-mode.el Thu Mar 25 19:06:28 2021 -0400
+++ b/contrib/hg-test-mode.el Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
;; hg-test-mode.el - Major mode for editing Mercurial tests
;;
-;; Copyright 2014 Matt Mackall
+;; Copyright 2014 Olivia Mackall
;; "I have no idea what I'm doing"
;;
;; This software may be used and distributed according to the terms of the
diff -r bc268ea9f984 -r f67b8946bb1b contrib/hgperf
--- a/contrib/hgperf Thu Mar 25 19:06:28 2021 -0400
+++ b/contrib/hgperf Tue Apr 20 11:01:06 2021 -0400
@@ -2,7 +2,7 @@
#
# hgperf - measure performance of Mercurial commands
#
-# Copyright 2014 Matt Mackall
+# Copyright 2014 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b contrib/logo-droplets.svg
--- a/contrib/logo-droplets.svg Thu Mar 25 19:06:28 2021 -0400
+++ b/contrib/logo-droplets.svg Tue Apr 20 11:01:06 2021 -0400
@@ -1,5 +1,5 @@
-
- Mercurial is Copyright 2005-2021 Matt Mackall and others.
+ Mercurial is Copyright 2005-2021 Olivia Mackall and others.
diff -r bc268ea9f984 -r f67b8946bb1b contrib/win32/hg.bat
--- a/contrib/win32/hg.bat Thu Mar 25 19:06:28 2021 -0400
+++ b/contrib/win32/hg.bat Tue Apr 20 11:01:06 2021 -0400
@@ -4,6 +4,8 @@
setlocal
set HG=%~f0
+set PYTHONLEGACYWINDOWSSTDIO=1
+
rem Use a full path to Python (relative to this script) if it exists,
rem as the standard Python install does not put python.exe on the PATH...
rem Otherwise, expect that python.exe can be found on the PATH.
diff -r bc268ea9f984 -r f67b8946bb1b doc/Makefile
--- a/doc/Makefile Thu Mar 25 19:06:28 2021 -0400
+++ b/doc/Makefile Tue Apr 20 11:01:06 2021 -0400
@@ -6,7 +6,14 @@
PREFIX=/usr/local
MANDIR=$(PREFIX)/share/man
INSTALL=install -m 644
-PYTHON?=python
+# Default to Python 3.
+#
+# Windows ships Python 3 as `python.exe`, which may not be on PATH. py.exe is.
+ifeq ($(OS),Windows_NT)
+PYTHON?=py -3
+else
+PYTHON?=python3
+endif
RSTARGS=
export HGENCODING=UTF-8
diff -r bc268ea9f984 -r f67b8946bb1b doc/gendoc.py
--- a/doc/gendoc.py Thu Mar 25 19:06:28 2021 -0400
+++ b/doc/gendoc.py Tue Apr 20 11:01:06 2021 -0400
@@ -31,6 +31,7 @@
commands,
encoding,
extensions,
+ fancyopts,
help,
minirst,
pycompat,
@@ -86,6 +87,8 @@
if b'\n' in desc:
# only remove line breaks and indentation
desc = b' '.join(l.lstrip() for l in desc.split(b'\n'))
+ if isinstance(default, fancyopts.customopt):
+ default = default.getdefaultvalue()
if default:
default = stringutil.forcebytestr(default)
desc += _(b" (default: %s)") % default
@@ -314,7 +317,12 @@
ui.write(b"\n")
# aliases
if d[b'aliases']:
- ui.write(_(b" aliases: %s\n\n") % b" ".join(d[b'aliases']))
+ # Note the empty comment, this is required to separate this
+ # (which should be a blockquote) from any preceding things (such
+ # as a definition list).
+ ui.write(
+ _(b"..\n\n aliases: %s\n\n") % b" ".join(d[b'aliases'])
+ )
def allextensionnames():
@@ -327,6 +335,11 @@
doc = encoding.strtolocal(sys.argv[1])
ui = uimod.ui.load()
+ # Trigger extensions to load. This is disabled by default because it uses
+ # the current user's configuration, which is often not what is wanted.
+ if encoding.environ.get(b'GENDOC_LOAD_CONFIGURED_EXTENSIONS', b'0') != b'0':
+ extensions.loadall(ui)
+
if doc == b'hg.1.gendoc':
showdoc(ui)
else:
diff -r bc268ea9f984 -r f67b8946bb1b doc/runrst
--- a/doc/runrst Thu Mar 25 19:06:28 2021 -0400
+++ b/doc/runrst Tue Apr 20 11:01:06 2021 -0400
@@ -2,7 +2,7 @@
#
# runrst - register custom roles and run correct writer
#
-# Copyright 2010 Matt Mackall and others
+# Copyright 2010 Olivia Mackall and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b hg
--- a/hg Thu Mar 25 19:06:28 2021 -0400
+++ b/hg Tue Apr 20 11:01:06 2021 -0400
@@ -2,7 +2,7 @@
#
# mercurial - scalable distributed SCM
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b hgdemandimport/demandimportpy2.py
--- a/hgdemandimport/demandimportpy2.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgdemandimport/demandimportpy2.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# demandimport.py - global demand-loading of modules for Mercurial
#
-# Copyright 2006, 2007 Matt Mackall
+# Copyright 2006, 2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b hgext/absorb.py
--- a/hgext/absorb.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/absorb.py Tue Apr 20 11:01:06 2021 -0400
@@ -102,6 +102,9 @@
class emptyfilecontext(object):
"""minimal filecontext representing an empty file"""
+ def __init__(self, repo):
+ self._repo = repo
+
def data(self):
return b''
@@ -212,7 +215,7 @@
if path in pctx:
fctxs.append(pctx[path])
else:
- fctxs.append(emptyfilecontext())
+ fctxs.append(emptyfilecontext(pctx.repo()))
fctxs.reverse()
# note: we rely on a property of hg: filerev is not reused for linear
diff -r bc268ea9f984 -r f67b8946bb1b hgext/blackbox.py
--- a/hgext/blackbox.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/blackbox.py Tue Apr 20 11:01:06 2021 -0400
@@ -38,7 +38,7 @@
[blackbox]
# Include nanoseconds in log entries with %f (see Python function
# datetime.datetime.strftime)
- date-format = '%Y-%m-%d @ %H:%M:%S.%f'
+ date-format = %Y-%m-%d @ %H:%M:%S.%f
"""
diff -r bc268ea9f984 -r f67b8946bb1b hgext/churn.py
--- a/hgext/churn.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/churn.py Tue Apr 20 11:01:06 2021 -0400
@@ -38,11 +38,16 @@
def changedlines(ui, repo, ctx1, ctx2, fmatch):
added, removed = 0, 0
diff = b''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
+ inhunk = False
for l in diff.split(b'\n'):
- if l.startswith(b"+") and not l.startswith(b"+++ "):
+ if inhunk and l.startswith(b"+"):
added += 1
- elif l.startswith(b"-") and not l.startswith(b"--- "):
+ elif inhunk and l.startswith(b"-"):
removed += 1
+ elif l.startswith(b"@"):
+ inhunk = True
+ elif l.startswith(b"d"):
+ inhunk = False
return (added, removed)
diff -r bc268ea9f984 -r f67b8946bb1b hgext/convert/__init__.py
--- a/hgext/convert/__init__.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/convert/__init__.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# convert.py Foreign SCM converter
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -491,6 +491,22 @@
:convert.skiptags: does not convert tags from the source repo to the target
repo. The default is False.
+
+ Subversion Destination
+ ######################
+
+ Original commit dates are not preserved by default.
+
+ :convert.svn.dangerous-set-commit-dates: preserve original commit dates,
+ forcefully setting ``svn:date`` revision properties. This option is
+ DANGEROUS and may break some subversion functionality for the resulting
+ repository (e.g. filtering revisions with date ranges in ``svn log``),
+ as original commit dates are not guaranteed to be monotonically
+ increasing.
+
+ For commit dates setting to work destination repository must have
+ ``pre-revprop-change`` hook configured to allow setting of ``svn:date``
+ revision properties. See Subversion documentation for more details.
"""
return convcmd.convert(ui, src, dest, revmapfile, **opts)
diff -r bc268ea9f984 -r f67b8946bb1b hgext/convert/common.py
--- a/hgext/convert/common.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/convert/common.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# common.py - common code for the convert extension
#
-# Copyright 2005-2009 Matt Mackall and others
+# Copyright 2005-2009 Olivia Mackall and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b hgext/convert/convcmd.py
--- a/hgext/convert/convcmd.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/convert/convcmd.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# convcmd - convert extension commands definition
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b hgext/convert/cvs.py
--- a/hgext/convert/cvs.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/convert/cvs.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# cvs.py: CVS conversion code inspired by hg-cvs-import and git-cvsimport
#
-# Copyright 2005-2009 Matt Mackall and others
+# Copyright 2005-2009 Olivia Mackall and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b hgext/convert/darcs.py
--- a/hgext/convert/darcs.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/convert/darcs.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# darcs.py - darcs support for the convert extension
#
-# Copyright 2007-2009 Matt Mackall and others
+# Copyright 2007-2009 Olivia Mackall and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b hgext/convert/git.py
--- a/hgext/convert/git.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/convert/git.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# git.py - git support for the convert extension
#
-# Copyright 2005-2009 Matt Mackall and others
+# Copyright 2005-2009 Olivia Mackall and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -247,7 +247,8 @@
b'\n'.join(line.strip() for line in content.split(b'\n')),
)
for sec in c.sections():
- s = c[sec]
+ # turn the config object into a real dict
+ s = dict(c.items(sec))
if b'url' in s and b'path' in s:
self.submodules.append(submodule(s[b'path'], b'', s[b'url']))
diff -r bc268ea9f984 -r f67b8946bb1b hgext/convert/hg.py
--- a/hgext/convert/hg.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/convert/hg.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# hg.py - hg backend for convert extension
#
-# Copyright 2005-2009 Matt Mackall and others
+# Copyright 2005-2009 Olivia Mackall and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b hgext/convert/subversion.py
--- a/hgext/convert/subversion.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/convert/subversion.py Tue Apr 20 11:01:06 2021 -0400
@@ -97,6 +97,17 @@
return s.decode(fsencoding).encode('utf-8')
+def formatsvndate(date):
+ return dateutil.datestr(date, b'%Y-%m-%dT%H:%M:%S.000000Z')
+
+
+def parsesvndate(s):
+ # Example SVN datetime. Includes microseconds.
+ # ISO-8601 conformant
+ # '2007-01-04T17:35:00.902377Z'
+ return dateutil.parsedate(s[:19] + b' UTC', [b'%Y-%m-%dT%H:%M:%S'])
+
+
class SvnPathNotFound(Exception):
pass
@@ -1158,12 +1169,7 @@
continue
paths.append((path, ent))
- # Example SVN datetime. Includes microseconds.
- # ISO-8601 conformant
- # '2007-01-04T17:35:00.902377Z'
- date = dateutil.parsedate(
- date[:19] + b" UTC", [b"%Y-%m-%dT%H:%M:%S"]
- )
+ date = parsesvndate(date)
if self.ui.configbool(b'convert', b'localtimezone'):
date = makedatetimestamp(date[0])
@@ -1380,7 +1386,7 @@
return logstream(stdout)
-pre_revprop_change = b'''#!/bin/sh
+pre_revprop_change_template = b'''#!/bin/sh
REPOS="$1"
REV="$2"
@@ -1388,15 +1394,26 @@
PROPNAME="$4"
ACTION="$5"
-if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
-if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
-if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
+%(rules)s
echo "Changing prohibited revision property" >&2
exit 1
'''
+def gen_pre_revprop_change_hook(prop_actions_allowed):
+ rules = []
+ for action, propname in prop_actions_allowed:
+ rules.append(
+ (
+ b'if [ "$ACTION" = "%s" -a "$PROPNAME" = "%s" ]; '
+ b'then exit 0; fi'
+ )
+ % (action, propname)
+ )
+ return pre_revprop_change_template % {b'rules': b'\n'.join(rules)}
+
+
class svn_sink(converter_sink, commandline):
commit_re = re.compile(br'Committed revision (\d+).', re.M)
uuid_re = re.compile(br'Repository UUID:\s*(\S+)', re.M)
@@ -1470,9 +1487,20 @@
self.is_exec = None
if created:
+ prop_actions_allowed = [
+ (b'M', b'svn:log'),
+ (b'A', b'hg:convert-branch'),
+ (b'A', b'hg:convert-rev'),
+ ]
+
+ if self.ui.configbool(
+ b'convert', b'svn.dangerous-set-commit-dates'
+ ):
+ prop_actions_allowed.append((b'M', b'svn:date'))
+
hook = os.path.join(created, b'hooks', b'pre-revprop-change')
fp = open(hook, b'wb')
- fp.write(pre_revprop_change)
+ fp.write(gen_pre_revprop_change_hook(prop_actions_allowed))
fp.close()
util.setflags(hook, False, True)
@@ -1667,6 +1695,23 @@
revprop=True,
revision=rev,
)
+
+ if self.ui.configbool(
+ b'convert', b'svn.dangerous-set-commit-dates'
+ ):
+ # Subverson always uses UTC to represent date and time
+ date = dateutil.parsedate(commit.date)
+ date = (date[0], 0)
+
+ # The only way to set date and time for svn commit is to use propset after commit is done
+ self.run(
+ b'propset',
+ b'svn:date',
+ formatsvndate(date),
+ revprop=True,
+ revision=rev,
+ )
+
for parent in parents:
self.addchild(parent, rev)
return self.revid(rev)
diff -r bc268ea9f984 -r f67b8946bb1b hgext/extdiff.py
--- a/hgext/extdiff.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/extdiff.py Tue Apr 20 11:01:06 2021 -0400
@@ -91,7 +91,7 @@
from mercurial.i18n import _
from mercurial.node import (
- nullid,
+ nullrev,
short,
)
from mercurial import (
@@ -565,18 +565,18 @@
repo, [from_rev] + [to_rev], b'nowarn'
)
ctx1a = scmutil.revsingle(repo, from_rev, None)
- ctx1b = repo[nullid]
+ ctx1b = repo[nullrev]
ctx2 = scmutil.revsingle(repo, to_rev, None)
else:
ctx1a, ctx2 = scmutil.revpair(repo, revs)
if not revs:
ctx1b = repo[None].p2()
else:
- ctx1b = repo[nullid]
+ ctx1b = repo[nullrev]
# Disable 3-way merge if there is only one parent
if do3way:
- if ctx1b.node() == nullid:
+ if ctx1b.rev() == nullrev:
do3way = False
matcher = scmutil.match(ctx2, pats, opts)
diff -r bc268ea9f984 -r f67b8946bb1b hgext/fastannotate/protocol.py
--- a/hgext/fastannotate/protocol.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/fastannotate/protocol.py Tue Apr 20 11:01:06 2021 -0400
@@ -20,6 +20,9 @@
wireprotov1peer,
wireprotov1server,
)
+from mercurial.utils import (
+ urlutil,
+)
from . import context
# common
@@ -151,9 +154,9 @@
def annotatepeer(repo):
ui = repo.ui
- remotepath = ui.expandpath(
- ui.config(b'fastannotate', b'remotepath', b'default')
- )
+ remotedest = ui.config(b'fastannotate', b'remotepath', b'default')
+ r = urlutil.get_unique_pull_path(b'fastannotate', repo, ui, remotedest)
+ remotepath = r[0]
peer = hg.peer(ui, {}, remotepath)
try:
diff -r bc268ea9f984 -r f67b8946bb1b hgext/fetch.py
--- a/hgext/fetch.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/fetch.py Tue Apr 20 11:01:06 2021 -0400
@@ -19,9 +19,11 @@
lock,
pycompat,
registrar,
- util,
)
-from mercurial.utils import dateutil
+from mercurial.utils import (
+ dateutil,
+ urlutil,
+)
release = lock.release
cmdtable = {}
@@ -107,10 +109,9 @@
)
)
- other = hg.peer(repo, opts, ui.expandpath(source))
- ui.status(
- _(b'pulling from %s\n') % util.hidepassword(ui.expandpath(source))
- )
+ path = urlutil.get_unique_pull_path(b'fetch', repo, ui, source)[0]
+ other = hg.peer(repo, opts, path)
+ ui.status(_(b'pulling from %s\n') % urlutil.hidepassword(path))
revs = None
if opts[b'rev']:
try:
@@ -180,7 +181,7 @@
if not err:
# we don't translate commit messages
message = cmdutil.logmessage(ui, opts) or (
- b'Automated merge with %s' % util.removeauth(other.url())
+ b'Automated merge with %s' % urlutil.removeauth(other.url())
)
editopt = opts.get(b'edit') or opts.get(b'force_editor')
editor = cmdutil.getcommiteditor(edit=editopt, editform=b'fetch')
diff -r bc268ea9f984 -r f67b8946bb1b hgext/fix.py
--- a/hgext/fix.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/fix.py Tue Apr 20 11:01:06 2021 -0400
@@ -131,8 +131,10 @@
import subprocess
from mercurial.i18n import _
-from mercurial.node import nullrev
-from mercurial.node import wdirrev
+from mercurial.node import (
+ nullrev,
+ wdirrev,
+)
from mercurial.utils import procutil
@@ -433,8 +435,9 @@
if not (len(revs) == 1 and wdirrev in revs):
cmdutil.checkunfinished(repo)
rewriteutil.precheck(repo, revs, b'fix')
- if wdirrev in revs and list(
- mergestatemod.mergestate.read(repo).unresolved()
+ if (
+ wdirrev in revs
+ and mergestatemod.mergestate.read(repo).unresolvedcount()
):
raise error.Abort(b'unresolved conflicts', hint=b"use 'hg resolve'")
if not revs:
diff -r bc268ea9f984 -r f67b8946bb1b hgext/git/__init__.py
--- a/hgext/git/__init__.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/git/__init__.py Tue Apr 20 11:01:06 2021 -0400
@@ -90,7 +90,7 @@
return os.path.join(self.path, b'..', b'.hg', f)
raise NotImplementedError(b'Need to pick file for %s.' % f)
- def changelog(self, trypending):
+ def changelog(self, trypending, concurrencychecker):
# TODO we don't have a plan for trypending in hg's git support yet
return gitlog.changelog(self.git, self._db)
diff -r bc268ea9f984 -r f67b8946bb1b hgext/git/gitlog.py
--- a/hgext/git/gitlog.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/git/gitlog.py Tue Apr 20 11:01:06 2021 -0400
@@ -8,6 +8,7 @@
nullhex,
nullid,
nullrev,
+ sha1nodeconstants,
wdirhex,
)
from mercurial import (
@@ -217,7 +218,7 @@
n = nodeorrev
# handle looking up nullid
if n == nullid:
- return hgchangelog._changelogrevision(extra={})
+ return hgchangelog._changelogrevision(extra={}, manifest=nullid)
hn = gitutil.togitnode(n)
# We've got a real commit!
files = [
@@ -422,6 +423,8 @@
class manifestlog(baselog):
+ nodeconstants = sha1nodeconstants
+
def __getitem__(self, node):
return self.get(b'', node)
diff -r bc268ea9f984 -r f67b8946bb1b hgext/histedit.py
--- a/hgext/histedit.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/histedit.py Tue Apr 20 11:01:06 2021 -0400
@@ -242,6 +242,7 @@
from mercurial.utils import (
dateutil,
stringutil,
+ urlutil,
)
pickle = util.pickle
@@ -1040,11 +1041,12 @@
Used by initialization code"""
if opts is None:
opts = {}
- dest = ui.expandpath(remote or b'default-push', remote or b'default')
- dest, branches = hg.parseurl(dest, None)[:2]
- ui.status(_(b'comparing with %s\n') % util.hidepassword(dest))
-
- revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
+ path = urlutil.get_unique_push_path(b'histedit', repo, ui, remote)
+ dest = path.pushloc or path.loc
+
+ ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(dest))
+
+ revs, checkout = hg.addbranchrevs(repo, repo, (path.branch, []), None)
other = hg.peer(repo, opts, dest)
if revs:
@@ -1581,10 +1583,19 @@
def layout(mode):
maxy, maxx = stdscr.getmaxyx()
helplen = len(helplines(mode))
+ mainlen = maxy - helplen - 12
+ if mainlen < 1:
+ raise error.Abort(
+ _(b"terminal dimensions %d by %d too small for curses histedit")
+ % (maxy, maxx),
+ hint=_(
+ b"enlarge your terminal or use --config ui.interface=text"
+ ),
+ )
return {
b'commit': (12, maxx),
b'help': (helplen, maxx),
- b'main': (maxy - helplen - 12, maxx),
+ b'main': (mainlen, maxx),
}
def drawvertwin(size, y, x):
@@ -1614,63 +1625,60 @@
stdscr.clear()
stdscr.refresh()
while True:
- try:
- oldmode, _ = state[b'mode']
- if oldmode == MODE_INIT:
- changemode(state, MODE_RULES)
- e = event(state, ch)
-
- if e == E_QUIT:
- return False
- if e == E_HISTEDIT:
- return state[b'rules']
+ oldmode, unused = state[b'mode']
+ if oldmode == MODE_INIT:
+ changemode(state, MODE_RULES)
+ e = event(state, ch)
+
+ if e == E_QUIT:
+ return False
+ if e == E_HISTEDIT:
+ return state[b'rules']
+ else:
+ if e == E_RESIZE:
+ size = screen_size()
+ if size != stdscr.getmaxyx():
+ curses.resizeterm(*size)
+
+ curmode, unused = state[b'mode']
+ sizes = layout(curmode)
+ if curmode != oldmode:
+ state[b'page_height'] = sizes[b'main'][0]
+ # Adjust the view to fit the current screen size.
+ movecursor(state, state[b'pos'], state[b'pos'])
+
+ # Pack the windows against the top, each pane spread across the
+ # full width of the screen.
+ y, x = (0, 0)
+ helpwin, y, x = drawvertwin(sizes[b'help'], y, x)
+ mainwin, y, x = drawvertwin(sizes[b'main'], y, x)
+ commitwin, y, x = drawvertwin(sizes[b'commit'], y, x)
+
+ if e in (E_PAGEDOWN, E_PAGEUP, E_LINEDOWN, E_LINEUP):
+ if e == E_PAGEDOWN:
+ changeview(state, +1, b'page')
+ elif e == E_PAGEUP:
+ changeview(state, -1, b'page')
+ elif e == E_LINEDOWN:
+ changeview(state, +1, b'line')
+ elif e == E_LINEUP:
+ changeview(state, -1, b'line')
+
+ # start rendering
+ commitwin.erase()
+ helpwin.erase()
+ mainwin.erase()
+ if curmode == MODE_PATCH:
+ renderpatch(mainwin, state)
+ elif curmode == MODE_HELP:
+ renderstring(mainwin, state, __doc__.strip().splitlines())
else:
- if e == E_RESIZE:
- size = screen_size()
- if size != stdscr.getmaxyx():
- curses.resizeterm(*size)
-
- curmode, _ = state[b'mode']
- sizes = layout(curmode)
- if curmode != oldmode:
- state[b'page_height'] = sizes[b'main'][0]
- # Adjust the view to fit the current screen size.
- movecursor(state, state[b'pos'], state[b'pos'])
-
- # Pack the windows against the top, each pane spread across the
- # full width of the screen.
- y, x = (0, 0)
- helpwin, y, x = drawvertwin(sizes[b'help'], y, x)
- mainwin, y, x = drawvertwin(sizes[b'main'], y, x)
- commitwin, y, x = drawvertwin(sizes[b'commit'], y, x)
-
- if e in (E_PAGEDOWN, E_PAGEUP, E_LINEDOWN, E_LINEUP):
- if e == E_PAGEDOWN:
- changeview(state, +1, b'page')
- elif e == E_PAGEUP:
- changeview(state, -1, b'page')
- elif e == E_LINEDOWN:
- changeview(state, +1, b'line')
- elif e == E_LINEUP:
- changeview(state, -1, b'line')
-
- # start rendering
- commitwin.erase()
- helpwin.erase()
- mainwin.erase()
- if curmode == MODE_PATCH:
- renderpatch(mainwin, state)
- elif curmode == MODE_HELP:
- renderstring(mainwin, state, __doc__.strip().splitlines())
- else:
- renderrules(mainwin, state)
- rendercommit(commitwin, state)
- renderhelp(helpwin, state)
- curses.doupdate()
- # done rendering
- ch = encoding.strtolocal(stdscr.getkey())
- except curses.error:
- pass
+ renderrules(mainwin, state)
+ rendercommit(commitwin, state)
+ renderhelp(helpwin, state)
+ curses.doupdate()
+ # done rendering
+ ch = encoding.strtolocal(stdscr.getkey())
def _chistedit(ui, repo, freeargs, opts):
diff -r bc268ea9f984 -r f67b8946bb1b hgext/infinitepush/__init__.py
--- a/hgext/infinitepush/__init__.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/infinitepush/__init__.py Tue Apr 20 11:01:06 2021 -0400
@@ -116,6 +116,7 @@
from mercurial.utils import (
procutil,
stringutil,
+ urlutil,
)
from mercurial import (
@@ -683,7 +684,13 @@
def _pull(orig, ui, repo, source=b"default", **opts):
opts = pycompat.byteskwargs(opts)
# Copy paste from `pull` command
- source, branches = hg.parseurl(ui.expandpath(source), opts.get(b'branch'))
+ source, branches = urlutil.get_unique_pull_path(
+ b"infinite-push's pull",
+ repo,
+ ui,
+ source,
+ default_branches=opts.get(b'branch'),
+ )
scratchbookmarks = {}
unfi = repo.unfiltered()
@@ -704,16 +711,19 @@
if scratchbookmarks:
other = hg.peer(repo, opts, source)
- fetchedbookmarks = other.listkeyspatterns(
- b'bookmarks', patterns=scratchbookmarks
- )
- for bookmark in scratchbookmarks:
- if bookmark not in fetchedbookmarks:
- raise error.Abort(
- b'remote bookmark %s not found!' % bookmark
- )
- scratchbookmarks[bookmark] = fetchedbookmarks[bookmark]
- revs.append(fetchedbookmarks[bookmark])
+ try:
+ fetchedbookmarks = other.listkeyspatterns(
+ b'bookmarks', patterns=scratchbookmarks
+ )
+ for bookmark in scratchbookmarks:
+ if bookmark not in fetchedbookmarks:
+ raise error.Abort(
+ b'remote bookmark %s not found!' % bookmark
+ )
+ scratchbookmarks[bookmark] = fetchedbookmarks[bookmark]
+ revs.append(fetchedbookmarks[bookmark])
+ finally:
+ other.close()
opts[b'bookmark'] = bookmarks
opts[b'rev'] = revs
@@ -805,7 +815,7 @@
return common, True, remoteheads
-def _push(orig, ui, repo, dest=None, *args, **opts):
+def _push(orig, ui, repo, *dests, **opts):
opts = pycompat.byteskwargs(opts)
bookmark = opts.get(b'bookmark')
# we only support pushing one infinitepush bookmark at once
@@ -833,25 +843,28 @@
oldphasemove = extensions.wrapfunction(
exchange, b'_localphasemove', _phasemove
)
- # Copy-paste from `push` command
- path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
- if not path:
- raise error.Abort(
- _(b'default repository not configured!'),
- hint=_(b"see 'hg help config.paths'"),
- )
+
+ paths = list(urlutil.get_push_paths(repo, ui, dests))
+ if len(paths) > 1:
+ msg = _(b'cannot push to multiple path with infinitepush')
+ raise error.Abort(msg)
+
+ path = paths[0]
destpath = path.pushloc or path.loc
# Remote scratch bookmarks will be deleted because remotenames doesn't
# know about them. Let's save it before push and restore after
remotescratchbookmarks = _readscratchremotebookmarks(ui, repo, destpath)
- result = orig(ui, repo, dest, *args, **pycompat.strkwargs(opts))
+ result = orig(ui, repo, *dests, **pycompat.strkwargs(opts))
if common.isremotebooksenabled(ui):
if bookmark and scratchpush:
other = hg.peer(repo, opts, destpath)
- fetchedbookmarks = other.listkeyspatterns(
- b'bookmarks', patterns=[bookmark]
- )
- remotescratchbookmarks.update(fetchedbookmarks)
+ try:
+ fetchedbookmarks = other.listkeyspatterns(
+ b'bookmarks', patterns=[bookmark]
+ )
+ remotescratchbookmarks.update(fetchedbookmarks)
+ finally:
+ other.close()
_saveremotebookmarks(repo, remotescratchbookmarks, destpath)
if oldphasemove:
exchange._localphasemove = oldphasemove
diff -r bc268ea9f984 -r f67b8946bb1b hgext/largefiles/basestore.py
--- a/hgext/largefiles/basestore.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/largefiles/basestore.py Tue Apr 20 11:01:06 2021 -0400
@@ -12,6 +12,9 @@
from mercurial.i18n import _
from mercurial import node, util
+from mercurial.utils import (
+ urlutil,
+)
from . import lfutil
@@ -29,13 +32,13 @@
def longmessage(self):
return _(b"error getting id %s from url %s for file %s: %s\n") % (
self.hash,
- util.hidepassword(self.url),
+ urlutil.hidepassword(self.url),
self.filename,
self.detail,
)
def __str__(self):
- return b"%s: %s" % (util.hidepassword(self.url), self.detail)
+ return b"%s: %s" % (urlutil.hidepassword(self.url), self.detail)
class basestore(object):
@@ -79,7 +82,7 @@
if not available.get(hash):
ui.warn(
_(b'%s: largefile %s not available from %s\n')
- % (filename, hash, util.hidepassword(self.url))
+ % (filename, hash, urlutil.hidepassword(self.url))
)
missing.append(filename)
continue
diff -r bc268ea9f984 -r f67b8946bb1b hgext/largefiles/lfutil.py
--- a/hgext/largefiles/lfutil.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/largefiles/lfutil.py Tue Apr 20 11:01:06 2021 -0400
@@ -206,6 +206,7 @@
repo.root,
repo.dirstate._validate,
lambda: sparse.matcher(repo),
+ repo.nodeconstants,
)
# If the largefiles dirstate does not exist, populate and create
@@ -513,7 +514,7 @@
def islfilesrepo(repo):
'''Return true if the repo is a largefile repo.'''
if b'largefiles' in repo.requirements and any(
- shortnameslash in f[0] for f in repo.store.datafiles()
+ shortnameslash in f[1] for f in repo.store.datafiles()
):
return True
diff -r bc268ea9f984 -r f67b8946bb1b hgext/largefiles/overrides.py
--- a/hgext/largefiles/overrides.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/largefiles/overrides.py Tue Apr 20 11:01:06 2021 -0400
@@ -1567,7 +1567,7 @@
# Calling purge with --all will cause the largefiles to be deleted.
# Override repo.status to prevent this from happening.
-@eh.wrapcommand(b'purge', extension=b'purge')
+@eh.wrapcommand(b'purge')
def overridepurge(orig, ui, repo, *dirs, **opts):
# XXX Monkey patching a repoview will not work. The assigned attribute will
# be set on the unfiltered repo, but we will only lookup attributes in the
diff -r bc268ea9f984 -r f67b8946bb1b hgext/largefiles/remotestore.py
--- a/hgext/largefiles/remotestore.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/largefiles/remotestore.py Tue Apr 20 11:01:06 2021 -0400
@@ -15,7 +15,10 @@
util,
)
-from mercurial.utils import stringutil
+from mercurial.utils import (
+ stringutil,
+ urlutil,
+)
from . import (
basestore,
@@ -40,11 +43,11 @@
if self.sendfile(source, hash):
raise error.Abort(
_(b'remotestore: could not put %s to remote store %s')
- % (source, util.hidepassword(self.url))
+ % (source, urlutil.hidepassword(self.url))
)
self.ui.debug(
_(b'remotestore: put %s to remote store %s\n')
- % (source, util.hidepassword(self.url))
+ % (source, urlutil.hidepassword(self.url))
)
def exists(self, hashes):
@@ -80,7 +83,7 @@
# keep trying with the other files... they will probably
# all fail too.
raise error.Abort(
- b'%s: %s' % (util.hidepassword(self.url), e.reason)
+ b'%s: %s' % (urlutil.hidepassword(self.url), e.reason)
)
except IOError as e:
raise basestore.StoreError(
diff -r bc268ea9f984 -r f67b8946bb1b hgext/largefiles/reposetup.py
--- a/hgext/largefiles/reposetup.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/largefiles/reposetup.py Tue Apr 20 11:01:06 2021 -0400
@@ -445,7 +445,7 @@
def checkrequireslfiles(ui, repo, **kwargs):
if b'largefiles' not in repo.requirements and any(
- lfutil.shortname + b'/' in f[0] for f in repo.store.datafiles()
+ lfutil.shortname + b'/' in f[1] for f in repo.store.datafiles()
):
repo.requirements.add(b'largefiles')
scmutil.writereporequirements(repo)
diff -r bc268ea9f984 -r f67b8946bb1b hgext/largefiles/storefactory.py
--- a/hgext/largefiles/storefactory.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/largefiles/storefactory.py Tue Apr 20 11:01:06 2021 -0400
@@ -12,6 +12,9 @@
hg,
util,
)
+from mercurial.utils import (
+ urlutil,
+)
from . import (
lfutil,
@@ -19,6 +22,7 @@
wirestore,
)
+
# During clone this function is passed the src's ui object
# but it needs the dest's ui object so it can read out of
# the config file. Use repo.ui instead.
@@ -28,24 +32,27 @@
if not remote:
lfpullsource = getattr(repo, 'lfpullsource', None)
- if lfpullsource:
- path = ui.expandpath(lfpullsource)
- elif put:
- path = ui.expandpath(b'default-push', b'default')
+ if put:
+ path = urlutil.get_unique_push_path(
+ b'lfpullsource', repo, ui, lfpullsource
+ )
else:
- path = ui.expandpath(b'default')
+ path, _branches = urlutil.get_unique_pull_path(
+ b'lfpullsource', repo, ui, lfpullsource
+ )
- # ui.expandpath() leaves 'default-push' and 'default' alone if
- # they cannot be expanded: fallback to the empty string,
- # meaning the current directory.
+ # XXX we should not explicitly pass b'default', as this will result in
+ # b'default' being returned if no `paths.default` was defined. We
+ # should explicitly handle the lack of value instead.
if repo is None:
- path = ui.expandpath(b'default')
- path, _branches = hg.parseurl(path)
+ path, _branches = urlutil.get_unique_pull_path(
+ b'lfs', repo, ui, b'default'
+ )
remote = hg.peer(repo or ui, {}, path)
elif path == b'default-push' or path == b'default':
remote = repo
else:
- path, _branches = hg.parseurl(path)
+ path, _branches = urlutil.parseurl(path)
remote = hg.peer(repo or ui, {}, path)
# The path could be a scheme so use Mercurial's normal functionality
@@ -71,7 +78,7 @@
raise error.Abort(
_(b'%s does not appear to be a largefile store')
- % util.hidepassword(path)
+ % urlutil.hidepassword(path)
)
diff -r bc268ea9f984 -r f67b8946bb1b hgext/lfs/blobstore.py
--- a/hgext/lfs/blobstore.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/lfs/blobstore.py Tue Apr 20 11:01:06 2021 -0400
@@ -31,7 +31,10 @@
worker,
)
-from mercurial.utils import stringutil
+from mercurial.utils import (
+ stringutil,
+ urlutil,
+)
from ..largefiles import lfutil
@@ -725,7 +728,7 @@
https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md
"""
lfsurl = repo.ui.config(b'lfs', b'url')
- url = util.url(lfsurl or b'')
+ url = urlutil.url(lfsurl or b'')
if lfsurl is None:
if remote:
path = remote
@@ -739,7 +742,7 @@
# and fall back to inferring from 'paths.remote' if unspecified.
path = repo.ui.config(b'paths', b'default') or b''
- defaulturl = util.url(path)
+ defaulturl = urlutil.url(path)
# TODO: support local paths as well.
# TODO: consider the ssh -> https transformation that git applies
@@ -748,7 +751,7 @@
defaulturl.path += b'/'
defaulturl.path = (defaulturl.path or b'') + b'.git/info/lfs'
- url = util.url(bytes(defaulturl))
+ url = urlutil.url(bytes(defaulturl))
repo.ui.note(_(b'lfs: assuming remote store: %s\n') % url)
scheme = url.scheme
diff -r bc268ea9f984 -r f67b8946bb1b hgext/lfs/wrapper.py
--- a/hgext/lfs/wrapper.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/lfs/wrapper.py Tue Apr 20 11:01:06 2021 -0400
@@ -116,10 +116,10 @@
if hgmeta or text.startswith(b'\1\n'):
text = storageutil.packmeta(hgmeta, text)
- return (text, True, {})
+ return (text, True)
-def writetostore(self, text, sidedata):
+def writetostore(self, text):
# hg filelog metadata (includes rename, etc)
hgmeta, offset = storageutil.parsemeta(text)
if offset and offset > 0:
diff -r bc268ea9f984 -r f67b8946bb1b hgext/mq.py
--- a/hgext/mq.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/mq.py Tue Apr 20 11:01:06 2021 -0400
@@ -108,6 +108,7 @@
from mercurial.utils import (
dateutil,
stringutil,
+ urlutil,
)
release = lockmod.release
@@ -2509,7 +2510,7 @@
)
filename = normname(filename)
self.checkreservedname(filename)
- if util.url(filename).islocal():
+ if urlutil.url(filename).islocal():
originpath = self.join(filename)
if not os.path.isfile(originpath):
raise error.Abort(
@@ -2862,11 +2863,12 @@
# main repo (destination and sources)
if dest is None:
dest = hg.defaultdest(source)
- sr = hg.peer(ui, opts, ui.expandpath(source))
+ __, source_path, __ = urlutil.get_clone_path(ui, source)
+ sr = hg.peer(ui, opts, source_path)
# patches repo (source only)
if opts.get(b'patches'):
- patchespath = ui.expandpath(opts.get(b'patches'))
+ __, patchespath, __ = urlutil.get_clone_path(ui, opts.get(b'patches'))
else:
patchespath = patchdir(sr)
try:
diff -r bc268ea9f984 -r f67b8946bb1b hgext/narrow/narrowcommands.py
--- a/hgext/narrow/narrowcommands.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/narrow/narrowcommands.py Tue Apr 20 11:01:06 2021 -0400
@@ -36,6 +36,9 @@
util,
wireprototypes,
)
+from mercurial.utils import (
+ urlutil,
+)
table = {}
command = registrar.command(table)
@@ -214,6 +217,7 @@
newincludes,
newexcludes,
force,
+ backup,
):
oldmatch = narrowspec.match(repo.root, oldincludes, oldexcludes)
newmatch = narrowspec.match(repo.root, newincludes, newexcludes)
@@ -272,10 +276,10 @@
hg.clean(repo, urev)
overrides = {(b'devel', b'strip-obsmarkers'): False}
with ui.configoverride(overrides, b'narrow'):
- repair.strip(ui, unfi, tostrip, topic=b'narrow')
+ repair.strip(ui, unfi, tostrip, topic=b'narrow', backup=backup)
todelete = []
- for f, f2, size in repo.store.datafiles():
+ for t, f, f2, size in repo.store.datafiles():
if f.startswith(b'data/'):
file = f[5:-2]
if not newmatch(file):
@@ -442,6 +446,12 @@
),
(
b'',
+ b'backup',
+ True,
+ _(b'back up local changes when narrowing'),
+ ),
+ (
+ b'',
b'update-working-copy',
False,
_(b'update working copy when the store has changed'),
@@ -583,81 +593,88 @@
# Find the revisions we have in common with the remote. These will
# be used for finding local-only changes for narrowing. They will
# also define the set of revisions to update for widening.
- remotepath = ui.expandpath(remotepath or b'default')
- url, branches = hg.parseurl(remotepath)
- ui.status(_(b'comparing with %s\n') % util.hidepassword(url))
+ r = urlutil.get_unique_pull_path(b'tracked', repo, ui, remotepath)
+ url, branches = r
+ ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(url))
remote = hg.peer(repo, opts, url)
- # check narrow support before doing anything if widening needs to be
- # performed. In future we should also abort if client is ellipses and
- # server does not support ellipses
- if widening and wireprototypes.NARROWCAP not in remote.capabilities():
- raise error.Abort(_(b"server does not support narrow clones"))
+ try:
+ # check narrow support before doing anything if widening needs to be
+ # performed. In future we should also abort if client is ellipses and
+ # server does not support ellipses
+ if (
+ widening
+ and wireprototypes.NARROWCAP not in remote.capabilities()
+ ):
+ raise error.Abort(_(b"server does not support narrow clones"))
- commoninc = discovery.findcommonincoming(repo, remote)
+ commoninc = discovery.findcommonincoming(repo, remote)
- if autoremoveincludes:
- outgoing = discovery.findcommonoutgoing(
- repo, remote, commoninc=commoninc
- )
- ui.status(_(b'looking for unused includes to remove\n'))
- localfiles = set()
- for n in itertools.chain(outgoing.missing, outgoing.excluded):
- localfiles.update(repo[n].files())
- suggestedremovals = []
- for include in sorted(oldincludes):
- match = narrowspec.match(repo.root, [include], oldexcludes)
- if not any(match(f) for f in localfiles):
- suggestedremovals.append(include)
- if suggestedremovals:
- for s in suggestedremovals:
- ui.status(b'%s\n' % s)
- if (
- ui.promptchoice(
- _(
- b'remove these unused includes (yn)?'
- b'$$ &Yes $$ &No'
+ if autoremoveincludes:
+ outgoing = discovery.findcommonoutgoing(
+ repo, remote, commoninc=commoninc
+ )
+ ui.status(_(b'looking for unused includes to remove\n'))
+ localfiles = set()
+ for n in itertools.chain(outgoing.missing, outgoing.excluded):
+ localfiles.update(repo[n].files())
+ suggestedremovals = []
+ for include in sorted(oldincludes):
+ match = narrowspec.match(repo.root, [include], oldexcludes)
+ if not any(match(f) for f in localfiles):
+ suggestedremovals.append(include)
+ if suggestedremovals:
+ for s in suggestedremovals:
+ ui.status(b'%s\n' % s)
+ if (
+ ui.promptchoice(
+ _(
+ b'remove these unused includes (yn)?'
+ b'$$ &Yes $$ &No'
+ )
)
- )
- == 0
- ):
- removedincludes.update(suggestedremovals)
- narrowing = True
- else:
- ui.status(_(b'found no unused includes\n'))
+ == 0
+ ):
+ removedincludes.update(suggestedremovals)
+ narrowing = True
+ else:
+ ui.status(_(b'found no unused includes\n'))
- if narrowing:
- newincludes = oldincludes - removedincludes
- newexcludes = oldexcludes | addedexcludes
- _narrow(
- ui,
- repo,
- remote,
- commoninc,
- oldincludes,
- oldexcludes,
- newincludes,
- newexcludes,
- opts[b'force_delete_local_changes'],
- )
- # _narrow() updated the narrowspec and _widen() below needs to
- # use the updated values as its base (otherwise removed includes
- # and addedexcludes will be lost in the resulting narrowspec)
- oldincludes = newincludes
- oldexcludes = newexcludes
+ if narrowing:
+ newincludes = oldincludes - removedincludes
+ newexcludes = oldexcludes | addedexcludes
+ _narrow(
+ ui,
+ repo,
+ remote,
+ commoninc,
+ oldincludes,
+ oldexcludes,
+ newincludes,
+ newexcludes,
+ opts[b'force_delete_local_changes'],
+ opts[b'backup'],
+ )
+ # _narrow() updated the narrowspec and _widen() below needs to
+ # use the updated values as its base (otherwise removed includes
+ # and addedexcludes will be lost in the resulting narrowspec)
+ oldincludes = newincludes
+ oldexcludes = newexcludes
- if widening:
- newincludes = oldincludes | addedincludes
- newexcludes = oldexcludes - removedexcludes
- _widen(
- ui,
- repo,
- remote,
- commoninc,
- oldincludes,
- oldexcludes,
- newincludes,
- newexcludes,
- )
+ if widening:
+ newincludes = oldincludes | addedincludes
+ newexcludes = oldexcludes - removedexcludes
+ _widen(
+ ui,
+ repo,
+ remote,
+ commoninc,
+ oldincludes,
+ oldexcludes,
+ newincludes,
+ newexcludes,
+ )
+ finally:
+ remote.close()
return 0
diff -r bc268ea9f984 -r f67b8946bb1b hgext/patchbomb.py
--- a/hgext/patchbomb.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/patchbomb.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# patchbomb.py - sending Mercurial changesets as patch emails
#
-# Copyright 2005-2009 Matt Mackall and others
+# Copyright 2005-2009 Olivia Mackall and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -99,7 +99,10 @@
templater,
util,
)
-from mercurial.utils import dateutil
+from mercurial.utils import (
+ dateutil,
+ urlutil,
+)
stringio = util.stringio
@@ -379,7 +382,10 @@
if btype:
opts['type'] = btype
try:
- commands.bundle(ui, repo, tmpfn, dest, **opts)
+ dests = []
+ if dest:
+ dests = [dest]
+ commands.bundle(ui, repo, tmpfn, *dests, **opts)
return util.readfile(tmpfn)
finally:
try:
@@ -527,9 +533,9 @@
def _getoutgoing(repo, dest, revs):
'''Return the revisions present locally but not in dest'''
ui = repo.ui
- url = ui.expandpath(dest or b'default-push', dest or b'default')
- url = hg.parseurl(url)[0]
- ui.status(_(b'comparing with %s\n') % util.hidepassword(url))
+ paths = urlutil.get_push_paths(repo, ui, [dest])
+ safe_paths = [urlutil.hidepassword(p.rawloc) for p in paths]
+ ui.status(_(b'comparing with %s\n') % b','.join(safe_paths))
revs = [r for r in revs if r >= 0]
if not revs:
diff -r bc268ea9f984 -r f67b8946bb1b hgext/phabricator.py
--- a/hgext/phabricator.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/phabricator.py Tue Apr 20 11:01:06 2021 -0400
@@ -103,6 +103,7 @@
from mercurial.utils import (
procutil,
stringutil,
+ urlutil,
)
from . import show
@@ -366,7 +367,7 @@
process(k, v)
process(b'', params)
- return util.urlreq.urlencode(flatparams)
+ return urlutil.urlreq.urlencode(flatparams)
def readurltoken(ui):
@@ -381,7 +382,7 @@
_(b'config %s.%s is required') % (b'phabricator', b'url')
)
- res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
+ res = httpconnectionmod.readauthforuri(ui, url, urlutil.url(url).user)
token = None
if res:
diff -r bc268ea9f984 -r f67b8946bb1b hgext/purge.py
--- a/hgext/purge.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/purge.py Tue Apr 20 11:01:06 2021 -0400
@@ -22,115 +22,11 @@
# You should have received a copy of the GNU General Public License
# along with this program; if not, see .
-'''command to delete untracked files from the working directory'''
-from __future__ import absolute_import
-
-from mercurial.i18n import _
-from mercurial import (
- cmdutil,
- merge as mergemod,
- pycompat,
- registrar,
- scmutil,
-)
-
-cmdtable = {}
-command = registrar.command(cmdtable)
-# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
-# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
-# be specifying the version(s) of Mercurial they are tested with, or
-# leave the attribute unspecified.
-testedwith = b'ships-with-hg-core'
-
-
-@command(
- b'purge|clean',
- [
- (b'a', b'abort-on-err', None, _(b'abort if an error occurs')),
- (b'', b'all', None, _(b'purge ignored files too')),
- (b'i', b'ignored', None, _(b'purge only ignored files')),
- (b'', b'dirs', None, _(b'purge empty directories')),
- (b'', b'files', None, _(b'purge files')),
- (b'p', b'print', None, _(b'print filenames instead of deleting them')),
- (
- b'0',
- b'print0',
- None,
- _(
- b'end filenames with NUL, for use with xargs'
- b' (implies -p/--print)'
- ),
- ),
- ]
- + cmdutil.walkopts,
- _(b'hg purge [OPTION]... [DIR]...'),
- helpcategory=command.CATEGORY_WORKING_DIRECTORY,
-)
-def purge(ui, repo, *dirs, **opts):
- """removes files not tracked by Mercurial
-
- Delete files not known to Mercurial. This is useful to test local
- and uncommitted changes in an otherwise-clean source tree.
-
- This means that purge will delete the following by default:
-
- - Unknown files: files marked with "?" by :hg:`status`
- - Empty directories: in fact Mercurial ignores directories unless
- they contain files under source control management
+'''command to delete untracked files from the working directory (DEPRECATED)
- But it will leave untouched:
-
- - Modified and unmodified tracked files
- - Ignored files (unless -i or --all is specified)
- - New files added to the repository (with :hg:`add`)
-
- The --files and --dirs options can be used to direct purge to delete
- only files, only directories, or both. If neither option is given,
- both will be deleted.
-
- If directories are given on the command line, only files in these
- directories are considered.
-
- Be careful with purge, as you could irreversibly delete some files
- you forgot to add to the repository. If you only want to print the
- list of files that this program would delete, use the --print
- option.
- """
- opts = pycompat.byteskwargs(opts)
- cmdutil.check_at_most_one_arg(opts, b'all', b'ignored')
+The functionality of this extension has been included in core Mercurial since
+version 5.7. Please use :hg:`purge ...` instead. :hg:`purge --confirm` is now the default, unless the extension is enabled for backward compatibility.
+'''
- act = not opts.get(b'print')
- eol = b'\n'
- if opts.get(b'print0'):
- eol = b'\0'
- act = False # --print0 implies --print
- if opts.get(b'all', False):
- ignored = True
- unknown = True
- else:
- ignored = opts.get(b'ignored', False)
- unknown = not ignored
-
- removefiles = opts.get(b'files')
- removedirs = opts.get(b'dirs')
-
- if not removefiles and not removedirs:
- removefiles = True
- removedirs = True
-
- match = scmutil.match(repo[None], dirs, opts)
-
- paths = mergemod.purge(
- repo,
- match,
- unknown=unknown,
- ignored=ignored,
- removeemptydirs=removedirs,
- removefiles=removefiles,
- abortonerror=opts.get(b'abort_on_err'),
- noop=not act,
- )
-
- for path in paths:
- if not act:
- ui.write(b'%s%s' % (path, eol))
+# This empty extension looks pointless, but core Mercurial checks if it's loaded
+# to implement the slightly different behavior documented above.
diff -r bc268ea9f984 -r f67b8946bb1b hgext/rebase.py
--- a/hgext/rebase.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/rebase.py Tue Apr 20 11:01:06 2021 -0400
@@ -67,6 +67,14 @@
cmdtable = {}
command = registrar.command(cmdtable)
+
+configtable = {}
+configitem = registrar.configitem(configtable)
+configitem(
+ b'devel',
+ b'rebase.force-in-memory-merge',
+ default=False,
+)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -136,7 +144,7 @@
return smartset.baseset()
dests = destutil.orphanpossibledestination(repo, src)
if len(dests) > 1:
- raise error.Abort(
+ raise error.StateError(
_(b"ambiguous automatic rebase: %r could end up on any of %r")
% (src, dests)
)
@@ -197,8 +205,8 @@
self.skipemptysuccessorf = rewriteutil.skip_empty_successor(
repo.ui, b'rebase'
)
- self.obsoletenotrebased = {}
- self.obsoletewithoutsuccessorindestination = set()
+ self.obsolete_with_successor_in_destination = {}
+ self.obsolete_with_successor_in_rebase_set = set()
self.inmemory = inmemory
self.dryrun = dryrun
self.stateobj = statemod.cmdstate(repo, b'rebasestate')
@@ -340,25 +348,33 @@
return data
- def _handleskippingobsolete(self, obsoleterevs, destmap):
- """Compute structures necessary for skipping obsolete revisions
-
- obsoleterevs: iterable of all obsolete revisions in rebaseset
- destmap: {srcrev: destrev} destination revisions
- """
- self.obsoletenotrebased = {}
+ def _handleskippingobsolete(self):
+ """Compute structures necessary for skipping obsolete revisions"""
+ if self.keepf:
+ return
if not self.ui.configbool(b'experimental', b'rebaseskipobsolete'):
return
- obsoleteset = set(obsoleterevs)
+ obsoleteset = {r for r in self.state if self.repo[r].obsolete()}
(
- self.obsoletenotrebased,
- self.obsoletewithoutsuccessorindestination,
- obsoleteextinctsuccessors,
- ) = _computeobsoletenotrebased(self.repo, obsoleteset, destmap)
- skippedset = set(self.obsoletenotrebased)
- skippedset.update(self.obsoletewithoutsuccessorindestination)
- skippedset.update(obsoleteextinctsuccessors)
+ self.obsolete_with_successor_in_destination,
+ self.obsolete_with_successor_in_rebase_set,
+ ) = _compute_obsolete_sets(self.repo, obsoleteset, self.destmap)
+ skippedset = set(self.obsolete_with_successor_in_destination)
+ skippedset.update(self.obsolete_with_successor_in_rebase_set)
_checkobsrebase(self.repo, self.ui, obsoleteset, skippedset)
+ allowdivergence = self.ui.configbool(
+ b'experimental', b'evolution.allowdivergence'
+ )
+ if allowdivergence:
+ self.obsolete_with_successor_in_rebase_set = set()
+ else:
+ for rev in self.repo.revs(
+ b'descendants(%ld) and not %ld',
+ self.obsolete_with_successor_in_rebase_set,
+ self.obsolete_with_successor_in_rebase_set,
+ ):
+ self.state.pop(rev, None)
+ self.destmap.pop(rev, None)
def _prepareabortorcontinue(
self, isabort, backup=True, suppwarns=False, dryrun=False, confirm=False
@@ -366,6 +382,8 @@
self.resume = True
try:
self.restorestatus()
+ # Calculate self.obsolete_* sets
+ self._handleskippingobsolete()
self.collapsemsg = restorecollapsemsg(self.repo, isabort)
except error.RepoLookupError:
if isabort:
@@ -396,15 +414,6 @@
if not destmap:
return _nothingtorebase()
- rebaseset = destmap.keys()
- if not self.keepf:
- try:
- rewriteutil.precheck(self.repo, rebaseset, action=b'rebase')
- except error.Abort as e:
- if e.hint is None:
- e.hint = _(b'use --keep to keep original changesets')
- raise e
-
result = buildstate(self.repo, destmap, self.collapsef)
if not result:
@@ -416,7 +425,7 @@
if self.collapsef:
dests = set(self.destmap.values())
if len(dests) != 1:
- raise error.Abort(
+ raise error.InputError(
_(b'--collapse does not work with multiple destinations')
)
destrev = next(iter(dests))
@@ -430,6 +439,20 @@
if dest.closesbranch() and not self.keepbranchesf:
self.ui.status(_(b'reopening closed branch head %s\n') % dest)
+ # Calculate self.obsolete_* sets
+ self._handleskippingobsolete()
+
+ if not self.keepf:
+ rebaseset = set(destmap.keys())
+ rebaseset -= set(self.obsolete_with_successor_in_destination)
+ rebaseset -= self.obsolete_with_successor_in_rebase_set
+ try:
+ rewriteutil.precheck(self.repo, rebaseset, action=b'rebase')
+ except error.Abort as e:
+ if e.hint is None:
+ e.hint = _(b'use --keep to keep original changesets')
+ raise e
+
self.prepared = True
def _assignworkingcopy(self):
@@ -461,14 +484,10 @@
for rev in self.state:
branches.add(repo[rev].branch())
if len(branches) > 1:
- raise error.Abort(
+ raise error.InputError(
_(b'cannot collapse multiple named branches')
)
- # Calculate self.obsoletenotrebased
- obsrevs = _filterobsoleterevs(self.repo, self.state)
- self._handleskippingobsolete(obsrevs, self.destmap)
-
# Keep track of the active bookmarks in order to reset them later
self.activebookmark = self.activebookmark or repo._activebookmark
if self.activebookmark:
@@ -490,19 +509,10 @@
def progress(ctx):
p.increment(item=(b"%d:%s" % (ctx.rev(), ctx)))
- allowdivergence = self.ui.configbool(
- b'experimental', b'evolution.allowdivergence'
- )
for subset in sortsource(self.destmap):
sortedrevs = self.repo.revs(b'sort(%ld, -topo)', subset)
- if not allowdivergence:
- sortedrevs -= self.repo.revs(
- b'descendants(%ld) and not %ld',
- self.obsoletewithoutsuccessorindestination,
- self.obsoletewithoutsuccessorindestination,
- )
for rev in sortedrevs:
- self._rebasenode(tr, rev, allowdivergence, progress)
+ self._rebasenode(tr, rev, progress)
p.complete()
ui.note(_(b'rebase merging completed\n'))
@@ -564,16 +574,13 @@
return newnode
- def _rebasenode(self, tr, rev, allowdivergence, progressfn):
+ def _rebasenode(self, tr, rev, progressfn):
repo, ui, opts = self.repo, self.ui, self.opts
ctx = repo[rev]
desc = _ctxdesc(ctx)
if self.state[rev] == rev:
ui.status(_(b'already rebased %s\n') % desc)
- elif (
- not allowdivergence
- and rev in self.obsoletewithoutsuccessorindestination
- ):
+ elif rev in self.obsolete_with_successor_in_rebase_set:
msg = (
_(
b'note: not rebasing %s and its descendants as '
@@ -583,8 +590,8 @@
)
repo.ui.status(msg)
self.skipped.add(rev)
- elif rev in self.obsoletenotrebased:
- succ = self.obsoletenotrebased[rev]
+ elif rev in self.obsolete_with_successor_in_destination:
+ succ = self.obsolete_with_successor_in_destination[rev]
if succ is None:
msg = _(b'note: not rebasing %s, it has no successor\n') % desc
else:
@@ -610,7 +617,7 @@
self.destmap,
self.state,
self.skipped,
- self.obsoletenotrebased,
+ self.obsolete_with_successor_in_destination,
)
if self.resume and self.wctx.p1().rev() == p1:
repo.ui.debug(b'resuming interrupted rebase\n')
@@ -722,7 +729,7 @@
self.destmap,
self.state,
self.skipped,
- self.obsoletenotrebased,
+ self.obsolete_with_successor_in_destination,
)
editopt = opts.get(b'edit')
editform = b'rebase.collapse'
@@ -1085,10 +1092,10 @@
with repo.wlock(), repo.lock():
rbsrt.restorestatus()
if rbsrt.collapsef:
- raise error.Abort(_(b"cannot stop in --collapse session"))
+ raise error.StateError(_(b"cannot stop in --collapse session"))
allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
if not (rbsrt.keepf or allowunstable):
- raise error.Abort(
+ raise error.StateError(
_(
b"cannot remove original changesets with"
b" unrebased descendants"
@@ -1112,6 +1119,8 @@
with ui.configoverride(overrides, b'rebase'):
return _dorebase(ui, repo, action, opts, inmemory=inmemory)
except error.InMemoryMergeConflictsError:
+ if ui.configbool(b'devel', b'rebase.force-in-memory-merge'):
+ raise
ui.warn(
_(
b'hit merge conflicts; re-running rebase without in-memory'
@@ -1210,14 +1219,16 @@
)
% help
)
- raise error.Abort(msg)
+ raise error.InputError(msg)
if rbsrt.collapsemsg and not rbsrt.collapsef:
- raise error.Abort(_(b'message can only be specified with collapse'))
+ raise error.InputError(
+ _(b'message can only be specified with collapse')
+ )
if action:
if rbsrt.collapsef:
- raise error.Abort(
+ raise error.InputError(
_(b'cannot use collapse with continue or abort')
)
if action == b'abort' and opts.get(b'tool', False):
@@ -1284,7 +1295,7 @@
cmdutil.bailifchanged(repo)
if ui.configbool(b'commands', b'rebase.requiredest') and not destf:
- raise error.Abort(
+ raise error.InputError(
_(b'you must specify a destination'),
hint=_(b'use: hg rebase -d REV'),
)
@@ -1378,7 +1389,7 @@
return None
if wdirrev in rebaseset:
- raise error.Abort(_(b'cannot rebase the working copy'))
+ raise error.InputError(_(b'cannot rebase the working copy'))
rebasingwcp = repo[b'.'].rev() in rebaseset
ui.log(
b"rebase",
@@ -1416,7 +1427,7 @@
elif size == 0:
ui.note(_(b'skipping %s - empty destination\n') % repo[r])
else:
- raise error.Abort(
+ raise error.InputError(
_(b'rebase destination for %s is not unique') % repo[r]
)
@@ -1449,7 +1460,7 @@
return nullrev
if len(parents) == 1:
return parents.pop()
- raise error.Abort(
+ raise error.StateError(
_(
b'unable to collapse on top of %d, there is more '
b'than one external parent: %s'
@@ -1649,7 +1660,7 @@
b"to force the rebase please set "
b"experimental.evolution.allowdivergence=True"
)
- raise error.Abort(msg % (b",".join(divhashes),), hint=h)
+ raise error.StateError(msg % (b",".join(divhashes),), hint=h)
def successorrevs(unfi, rev):
@@ -1752,7 +1763,7 @@
# /| # None of A and B will be changed to D and rebase fails.
# A B D
if set(newps) == set(oldps) and dest not in newps:
- raise error.Abort(
+ raise error.InputError(
_(
b'cannot rebase %d:%s without '
b'moving at least one of its parents'
@@ -1764,7 +1775,7 @@
# impossible. With multi-dest, the initial check does not cover complex
# cases since we don't have abstractions to dry-run rebase cheaply.
if any(p != nullrev and isancestor(rev, p) for p in newps):
- raise error.Abort(_(b'source is ancestor of destination'))
+ raise error.InputError(_(b'source is ancestor of destination'))
# Check if the merge will contain unwanted changes. That may happen if
# there are multiple special (non-changelog ancestor) merge bases, which
@@ -1826,7 +1837,7 @@
if revs is not None
)
)
- raise error.Abort(
+ raise error.InputError(
_(b'rebasing %d:%s will include unwanted changes from %s')
% (rev, repo[rev], unwanteddesc)
)
@@ -1971,7 +1982,7 @@
if destmap[r] not in srcset:
result.append(r)
if not result:
- raise error.Abort(_(b'source and destination form a cycle'))
+ raise error.InputError(_(b'source and destination form a cycle'))
srcset -= set(result)
yield result
@@ -1991,12 +2002,12 @@
if b'qtip' in repo.tags():
mqapplied = {repo[s.node].rev() for s in repo.mq.applied}
if set(destmap.values()) & mqapplied:
- raise error.Abort(_(b'cannot rebase onto an applied mq patch'))
+ raise error.StateError(_(b'cannot rebase onto an applied mq patch'))
# Get "cycle" error early by exhausting the generator.
sortedsrc = list(sortsource(destmap)) # a list of sorted revs
if not sortedsrc:
- raise error.Abort(_(b'no matching revisions'))
+ raise error.InputError(_(b'no matching revisions'))
# Only check the first batch of revisions to rebase not depending on other
# rebaseset. This means "source is ancestor of destination" for the second
@@ -2004,7 +2015,7 @@
# "defineparents" to do that check.
roots = list(repo.set(b'roots(%ld)', sortedsrc[0]))
if not roots:
- raise error.Abort(_(b'no matching revisions'))
+ raise error.InputError(_(b'no matching revisions'))
def revof(r):
return r.rev()
@@ -2016,7 +2027,7 @@
dest = repo[destmap[root.rev()]]
commonbase = root.ancestor(dest)
if commonbase == root:
- raise error.Abort(_(b'source is ancestor of destination'))
+ raise error.InputError(_(b'source is ancestor of destination'))
if commonbase == dest:
wctx = repo[None]
if dest == wctx.p1():
@@ -2109,7 +2120,7 @@
if ui.configbool(b'commands', b'rebase.requiredest'):
msg = _(b'rebase destination required by configuration')
hint = _(b'use hg pull followed by hg rebase -d DEST')
- raise error.Abort(msg, hint=hint)
+ raise error.InputError(msg, hint=hint)
with repo.wlock(), repo.lock():
if opts.get('update'):
@@ -2166,34 +2177,24 @@
commands.update(ui, repo)
else:
if opts.get('tool'):
- raise error.Abort(_(b'--tool can only be used with --rebase'))
+ raise error.InputError(_(b'--tool can only be used with --rebase'))
ret = orig(ui, repo, *args, **opts)
return ret
-def _filterobsoleterevs(repo, revs):
- """returns a set of the obsolete revisions in revs"""
- return {r for r in revs if repo[r].obsolete()}
-
+def _compute_obsolete_sets(repo, rebaseobsrevs, destmap):
+ """Figure out what to do about about obsolete revisions
-def _computeobsoletenotrebased(repo, rebaseobsrevs, destmap):
- """Return (obsoletenotrebased, obsoletewithoutsuccessorindestination).
-
- `obsoletenotrebased` is a mapping mapping obsolete => successor for all
+ `obsolete_with_successor_in_destination` is a mapping mapping obsolete => successor for all
obsolete nodes to be rebased given in `rebaseobsrevs`.
- `obsoletewithoutsuccessorindestination` is a set with obsolete revisions
- without a successor in destination.
-
- `obsoleteextinctsuccessors` is a set of obsolete revisions with only
- obsolete successors.
+ `obsolete_with_successor_in_rebase_set` is a set with obsolete revisions,
+ without a successor in destination, that would cause divergence.
"""
- obsoletenotrebased = {}
- obsoletewithoutsuccessorindestination = set()
- obsoleteextinctsuccessors = set()
+ obsolete_with_successor_in_destination = {}
+ obsolete_with_successor_in_rebase_set = set()
- assert repo.filtername is None
cl = repo.changelog
get_rev = cl.index.get_rev
extinctrevs = set(repo.revs(b'extinct()'))
@@ -2205,29 +2206,25 @@
successors.remove(srcnode)
succrevs = {get_rev(s) for s in successors}
succrevs.discard(None)
- if succrevs.issubset(extinctrevs):
- # all successors are extinct
- obsoleteextinctsuccessors.add(srcrev)
- if not successors:
- # no successor
- obsoletenotrebased[srcrev] = None
+ if not successors or succrevs.issubset(extinctrevs):
+ # no successor, or all successors are extinct
+ obsolete_with_successor_in_destination[srcrev] = None
else:
dstrev = destmap[srcrev]
for succrev in succrevs:
if cl.isancestorrev(succrev, dstrev):
- obsoletenotrebased[srcrev] = succrev
+ obsolete_with_successor_in_destination[srcrev] = succrev
break
else:
# If 'srcrev' has a successor in rebase set but none in
# destination (which would be catched above), we shall skip it
# and its descendants to avoid divergence.
if srcrev in extinctrevs or any(s in destmap for s in succrevs):
- obsoletewithoutsuccessorindestination.add(srcrev)
+ obsolete_with_successor_in_rebase_set.add(srcrev)
return (
- obsoletenotrebased,
- obsoletewithoutsuccessorindestination,
- obsoleteextinctsuccessors,
+ obsolete_with_successor_in_destination,
+ obsolete_with_successor_in_rebase_set,
)
diff -r bc268ea9f984 -r f67b8946bb1b hgext/releasenotes.py
--- a/hgext/releasenotes.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/releasenotes.py Tue Apr 20 11:01:06 2021 -0400
@@ -280,7 +280,7 @@
if b'.hgreleasenotes' in ctx:
read(b'.hgreleasenotes')
- return p[b'sections']
+ return p.items(b'sections')
def checkadmonitions(ui, repo, directives, revs):
diff -r bc268ea9f984 -r f67b8946bb1b hgext/relink.py
--- a/hgext/relink.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/relink.py Tue Apr 20 11:01:06 2021 -0400
@@ -19,7 +19,10 @@
registrar,
util,
)
-from mercurial.utils import stringutil
+from mercurial.utils import (
+ stringutil,
+ urlutil,
+)
cmdtable = {}
command = registrar.command(cmdtable)
@@ -62,10 +65,11 @@
util, b'samedevice'
):
raise error.Abort(_(b'hardlinks are not supported on this system'))
- src = hg.repository(
- repo.baseui,
- ui.expandpath(origin or b'default-relink', origin or b'default'),
- )
+
+ if origin is None and b'default-relink' in ui.paths:
+ origin = b'default-relink'
+ path, __ = urlutil.get_unique_pull_path(b'relink', repo, ui, origin)
+ src = hg.repository(repo.baseui, path)
ui.status(_(b'relinking %s to %s\n') % (src.store.path, repo.store.path))
if repo.root == src.root:
ui.status(_(b'there is nothing to relink\n'))
diff -r bc268ea9f984 -r f67b8946bb1b hgext/remotefilelog/__init__.py
--- a/hgext/remotefilelog/__init__.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/remotefilelog/__init__.py Tue Apr 20 11:01:06 2021 -0400
@@ -215,6 +215,8 @@
configitem(b'remotefilelog', b'backgroundprefetch', default=False)
configitem(b'remotefilelog', b'prefetchdelay', default=120)
configitem(b'remotefilelog', b'prefetchdays', default=14)
+# Other values include 'local' or 'none'. Any unrecognized value is 'all'.
+configitem(b'remotefilelog', b'strip.includefiles', default='all')
configitem(b'remotefilelog', b'getfilesstep', default=10000)
configitem(b'remotefilelog', b'getfilestype', default=b'optimistic')
@@ -886,7 +888,7 @@
progress.update(count)
count += 1
try:
- path = ui.expandpath(os.path.normpath(path))
+ path = util.expandpath(os.path.normpath(path))
except TypeError as e:
ui.warn(_(b"warning: malformed path: %r:%s\n") % (path, e))
traceback.print_exc()
diff -r bc268ea9f984 -r f67b8946bb1b hgext/remotefilelog/connectionpool.py
--- a/hgext/remotefilelog/connectionpool.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/remotefilelog/connectionpool.py Tue Apr 20 11:01:06 2021 -0400
@@ -8,7 +8,6 @@
from __future__ import absolute_import
from mercurial import (
- extensions,
hg,
pycompat,
sshpeer,
@@ -43,17 +42,19 @@
if conn is None:
- def _cleanup(orig):
- # close pipee first so peer.cleanup reading it won't deadlock,
- # if there are other processes with pipeo open (i.e. us).
- peer = orig.im_self
- if util.safehasattr(peer, 'pipee'):
- peer.pipee.close()
- return orig()
+ peer = hg.peer(self._repo.ui, {}, path)
+ if util.safehasattr(peer, '_cleanup'):
- peer = hg.peer(self._repo.ui, {}, path)
- if util.safehasattr(peer, 'cleanup'):
- extensions.wrapfunction(peer, b'cleanup', _cleanup)
+ class mypeer(peer.__class__):
+ def _cleanup(self, warn=None):
+ # close pipee first so peer.cleanup reading it won't
+ # deadlock, if there are other processes with pipeo
+ # open (i.e. us).
+ if util.safehasattr(self, 'pipee'):
+ self.pipee.close()
+ return super(mypeer, self)._cleanup()
+
+ peer.__class__ = mypeer
conn = connection(pathpool, peer)
diff -r bc268ea9f984 -r f67b8946bb1b hgext/remotefilelog/contentstore.py
--- a/hgext/remotefilelog/contentstore.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/remotefilelog/contentstore.py Tue Apr 20 11:01:06 2021 -0400
@@ -365,7 +365,7 @@
ledger.markdataentry(self, treename, node)
ledger.markhistoryentry(self, treename, node)
- for path, encoded, size in self._store.datafiles():
+ for t, path, encoded, size in self._store.datafiles():
if path[:5] != b'meta/' or path[-2:] != b'.i':
continue
diff -r bc268ea9f984 -r f67b8946bb1b hgext/remotefilelog/remotefilelog.py
--- a/hgext/remotefilelog/remotefilelog.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/remotefilelog/remotefilelog.py Tue Apr 20 11:01:06 2021 -0400
@@ -155,12 +155,12 @@
# text passed to "addrevision" includes hg filelog metadata header
if node is None:
node = storageutil.hashrevisionsha1(text, p1, p2)
- if sidedata is None:
- sidedata = {}
meta, metaoffset = storageutil.parsemeta(text)
rawtext, validatehash = flagutil.processflagswrite(
- self, text, flags, sidedata=sidedata
+ self,
+ text,
+ flags,
)
return self.addrawrevision(
rawtext,
@@ -306,6 +306,7 @@
assumehaveparentrevisions=False,
deltaprevious=False,
deltamode=None,
+ sidedata_helpers=None,
):
# we don't use any of these parameters here
del nodesorder, revisiondata, assumehaveparentrevisions, deltaprevious
@@ -333,6 +334,8 @@
baserevisionsize=None,
revision=revision,
delta=delta,
+ # Sidedata is not supported yet
+ sidedata=None,
)
def revdiff(self, node1, node2):
diff -r bc268ea9f984 -r f67b8946bb1b hgext/remotefilelog/remotefilelogserver.py
--- a/hgext/remotefilelog/remotefilelogserver.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/remotefilelog/remotefilelogserver.py Tue Apr 20 11:01:06 2021 -0400
@@ -164,24 +164,26 @@
b'.d'
):
n = util.pconvert(fp[striplen:])
- yield (store.decodedir(n), n, st.st_size)
+ d = store.decodedir(n)
+ t = store.FILETYPE_OTHER
+ yield (t, d, n, st.st_size)
if kind == stat.S_IFDIR:
visit.append(fp)
if scmutil.istreemanifest(repo):
- for (u, e, s) in repo.store.datafiles():
+ for (t, u, e, s) in repo.store.datafiles():
if u.startswith(b'meta/') and (
u.endswith(b'.i') or u.endswith(b'.d')
):
- yield (u, e, s)
+ yield (t, u, e, s)
# Return .d and .i files that do not match the shallow pattern
match = state.match
if match and not match.always():
- for (u, e, s) in repo.store.datafiles():
+ for (t, u, e, s) in repo.store.datafiles():
f = u[5:-2] # trim data/... and .i/.d
if not state.match(f):
- yield (u, e, s)
+ yield (t, u, e, s)
for x in repo.store.topfiles():
if state.noflatmf and x[0][:11] == b'00manifest.':
diff -r bc268ea9f984 -r f67b8946bb1b hgext/remotefilelog/shallowbundle.py
--- a/hgext/remotefilelog/shallowbundle.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/remotefilelog/shallowbundle.py Tue Apr 20 11:01:06 2021 -0400
@@ -67,7 +67,7 @@
shallowcg1packer, self, nodelist, rlog, lookup, units=units
)
- def generatefiles(self, changedfiles, *args):
+ def generatefiles(self, changedfiles, *args, **kwargs):
try:
linknodes, commonrevs, source = args
except ValueError:
@@ -92,7 +92,9 @@
[f for f in changedfiles if not repo.shallowmatch(f)]
)
- return super(shallowcg1packer, self).generatefiles(changedfiles, *args)
+ return super(shallowcg1packer, self).generatefiles(
+ changedfiles, *args, **kwargs
+ )
def shouldaddfilegroups(self, source):
repo = self._repo
@@ -102,6 +104,18 @@
if source == b"push" or source == b"bundle":
return AllFiles
+ # We won't actually strip the files, but we should put them in any
+ # backup bundle generated by strip (especially for cases like narrow's
+ # `hg tracked --removeinclude`, as failing to do so means that the
+ # "saved" changesets during a strip won't have their files reapplied and
+ # thus their linknode adjusted, if necessary).
+ if source == b"strip":
+ cfg = repo.ui.config(b'remotefilelog', b'strip.includefiles')
+ if cfg == b'local':
+ return LocalFiles
+ elif cfg != b'none':
+ return AllFiles
+
caps = self._bundlecaps or []
if source == b"serve" or source == b"pull":
if constants.BUNDLE2_CAPABLITY in caps:
@@ -176,9 +190,11 @@
repo.shallowmatch = original
-def addchangegroupfiles(orig, repo, source, revmap, trp, expectedfiles, *args):
+def addchangegroupfiles(
+ orig, repo, source, revmap, trp, expectedfiles, *args, **kwargs
+):
if not shallowutil.isenabled(repo):
- return orig(repo, source, revmap, trp, expectedfiles, *args)
+ return orig(repo, source, revmap, trp, expectedfiles, *args, **kwargs)
newfiles = 0
visited = set()
@@ -272,7 +288,7 @@
revisiondata = revisiondatas[(f, node)]
# revisiondata: (node, p1, p2, cs, deltabase, delta, flags)
- node, p1, p2, linknode, deltabase, delta, flags = revisiondata
+ node, p1, p2, linknode, deltabase, delta, flags, sidedata = revisiondata
if not available(f, node, f, deltabase):
continue
diff -r bc268ea9f984 -r f67b8946bb1b hgext/schemes.py
--- a/hgext/schemes.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/schemes.py Tue Apr 20 11:01:06 2021 -0400
@@ -52,7 +52,9 @@
pycompat,
registrar,
templater,
- util,
+)
+from mercurial.utils import (
+ urlutil,
)
cmdtable = {}
@@ -86,7 +88,7 @@
)
def resolve(self, url):
- # Should this use the util.url class, or is manual parsing better?
+ # Should this use the urlutil.url class, or is manual parsing better?
try:
url = url.split(b'://', 1)[1]
except IndexError:
@@ -137,7 +139,7 @@
)
hg.schemes[scheme] = ShortRepository(url, scheme, t)
- extensions.wrapfunction(util, b'hasdriveletter', hasdriveletter)
+ extensions.wrapfunction(urlutil, b'hasdriveletter', hasdriveletter)
@command(b'debugexpandscheme', norepo=True)
diff -r bc268ea9f984 -r f67b8946bb1b hgext/share.py
--- a/hgext/share.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/share.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,4 +1,4 @@
-# Copyright 2006, 2007 Matt Mackall
+# Copyright 2006, 2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b hgext/split.py
--- a/hgext/split.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/split.py Tue Apr 20 11:01:06 2021 -0400
@@ -12,7 +12,7 @@
from mercurial.i18n import _
from mercurial.node import (
- nullid,
+ nullrev,
short,
)
@@ -27,6 +27,7 @@
revsetlang,
rewriteutil,
scmutil,
+ util,
)
# allow people to use split without explicitly enabling rebase extension
@@ -69,57 +70,62 @@
if opts.get(b'rev'):
revlist.append(opts.get(b'rev'))
revlist.extend(revs)
- with repo.wlock(), repo.lock(), repo.transaction(b'split') as tr:
- revs = scmutil.revrange(repo, revlist or [b'.'])
- if len(revs) > 1:
- raise error.InputError(_(b'cannot split multiple revisions'))
+ with repo.wlock(), repo.lock():
+ tr = repo.transaction(b'split')
+ # If the rebase somehow runs into conflicts, make sure
+ # we close the transaction so the user can continue it.
+ with util.acceptintervention(tr):
+ revs = scmutil.revrange(repo, revlist or [b'.'])
+ if len(revs) > 1:
+ raise error.InputError(_(b'cannot split multiple revisions'))
- rev = revs.first()
- ctx = repo[rev]
- # Handle nullid specially here (instead of leaving for precheck()
- # below) so we get a nicer message and error code.
- if rev is None or ctx.node() == nullid:
- ui.status(_(b'nothing to split\n'))
- return 1
- if ctx.node() is None:
- raise error.InputError(_(b'cannot split working directory'))
+ rev = revs.first()
+ # Handle nullrev specially here (instead of leaving for precheck()
+ # below) so we get a nicer message and error code.
+ if rev is None or rev == nullrev:
+ ui.status(_(b'nothing to split\n'))
+ return 1
+ ctx = repo[rev]
+ if ctx.node() is None:
+ raise error.InputError(_(b'cannot split working directory'))
- if opts.get(b'rebase'):
- # Skip obsoleted descendants and their descendants so the rebase
- # won't cause conflicts for sure.
- descendants = list(repo.revs(b'(%d::) - (%d)', rev, rev))
- torebase = list(
- repo.revs(
- b'%ld - (%ld & obsolete())::', descendants, descendants
+ if opts.get(b'rebase'):
+ # Skip obsoleted descendants and their descendants so the rebase
+ # won't cause conflicts for sure.
+ descendants = list(repo.revs(b'(%d::) - (%d)', rev, rev))
+ torebase = list(
+ repo.revs(
+ b'%ld - (%ld & obsolete())::', descendants, descendants
+ )
)
- )
- else:
- torebase = []
- rewriteutil.precheck(repo, [rev] + torebase, b'split')
+ else:
+ torebase = []
+ rewriteutil.precheck(repo, [rev] + torebase, b'split')
- if len(ctx.parents()) > 1:
- raise error.InputError(_(b'cannot split a merge changeset'))
+ if len(ctx.parents()) > 1:
+ raise error.InputError(_(b'cannot split a merge changeset'))
- cmdutil.bailifchanged(repo)
+ cmdutil.bailifchanged(repo)
- # Deactivate bookmark temporarily so it won't get moved unintentionally
- bname = repo._activebookmark
- if bname and repo._bookmarks[bname] != ctx.node():
- bookmarks.deactivate(repo)
+ # Deactivate bookmark temporarily so it won't get moved
+ # unintentionally
+ bname = repo._activebookmark
+ if bname and repo._bookmarks[bname] != ctx.node():
+ bookmarks.deactivate(repo)
- wnode = repo[b'.'].node()
- top = None
- try:
- top = dosplit(ui, repo, tr, ctx, opts)
- finally:
- # top is None: split failed, need update --clean recovery.
- # wnode == ctx.node(): wnode split, no need to update.
- if top is None or wnode != ctx.node():
- hg.clean(repo, wnode, show_stats=False)
- if bname:
- bookmarks.activate(repo, bname)
- if torebase and top:
- dorebase(ui, repo, torebase, top)
+ wnode = repo[b'.'].node()
+ top = None
+ try:
+ top = dosplit(ui, repo, tr, ctx, opts)
+ finally:
+ # top is None: split failed, need update --clean recovery.
+ # wnode == ctx.node(): wnode split, no need to update.
+ if top is None or wnode != ctx.node():
+ hg.clean(repo, wnode, show_stats=False)
+ if bname:
+ bookmarks.activate(repo, bname)
+ if torebase and top:
+ dorebase(ui, repo, torebase, top)
def dosplit(ui, repo, tr, ctx, opts):
@@ -165,19 +171,26 @@
b'message': header + ctx.description(),
}
)
+ origctx = repo[b'.']
commands.commit(ui, repo, **pycompat.strkwargs(opts))
newctx = repo[b'.']
- committed.append(newctx)
+ # Ensure user didn't do a "no-op" split (such as deselecting
+ # everything).
+ if origctx.node() != newctx.node():
+ committed.append(newctx)
if not committed:
raise error.InputError(_(b'cannot split an empty revision'))
- scmutil.cleanupnodes(
- repo,
- {ctx.node(): [c.node() for c in committed]},
- operation=b'split',
- fixphase=True,
- )
+ if len(committed) != 1 or committed[0].node() != ctx.node():
+ # Ensure we don't strip a node if we produce the same commit as already
+ # exists
+ scmutil.cleanupnodes(
+ repo,
+ {ctx.node(): [c.node() for c in committed]},
+ operation=b'split',
+ fixphase=True,
+ )
return committed[-1]
diff -r bc268ea9f984 -r f67b8946bb1b hgext/sqlitestore.py
--- a/hgext/sqlitestore.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/sqlitestore.py Tue Apr 20 11:01:06 2021 -0400
@@ -54,6 +54,7 @@
from mercurial.node import (
nullid,
nullrev,
+ sha1nodeconstants,
short,
)
from mercurial.thirdparty import attr
@@ -288,6 +289,7 @@
baserevisionsize = attr.ib()
revision = attr.ib()
delta = attr.ib()
+ sidedata = attr.ib()
linknode = attr.ib(default=None)
@@ -304,6 +306,7 @@
"""Implements storage for an individual tracked path."""
def __init__(self, db, path, compression):
+ self.nullid = sha1nodeconstants.nullid
self._db = db
self._path = path
@@ -586,6 +589,7 @@
revisiondata=False,
assumehaveparentrevisions=False,
deltamode=repository.CG_DELTAMODE_STD,
+ sidedata_helpers=None,
):
if nodesorder not in (b'nodes', b'storage', b'linear', None):
raise error.ProgrammingError(
@@ -624,6 +628,7 @@
revisiondata=revisiondata,
assumehaveparentrevisions=assumehaveparentrevisions,
deltamode=deltamode,
+ sidedata_helpers=sidedata_helpers,
):
yield delta
@@ -636,7 +641,8 @@
if meta or filedata.startswith(b'\x01\n'):
filedata = storageutil.packmeta(meta, filedata)
- return self.addrevision(filedata, transaction, linkrev, p1, p2)
+ rev = self.addrevision(filedata, transaction, linkrev, p1, p2)
+ return self.node(rev)
def addrevision(
self,
@@ -658,15 +664,16 @@
if validatehash:
self._checkhash(revisiondata, node, p1, p2)
- if node in self._nodetorev:
- return node
+ rev = self._nodetorev.get(node)
+ if rev is not None:
+ return rev
- node = self._addrawrevision(
+ rev = self._addrawrevision(
node, revisiondata, transaction, linkrev, p1, p2
)
self._revisioncache[node] = revisiondata
- return node
+ return rev
def addgroup(
self,
@@ -679,7 +686,16 @@
):
empty = True
- for node, p1, p2, linknode, deltabase, delta, wireflags in deltas:
+ for (
+ node,
+ p1,
+ p2,
+ linknode,
+ deltabase,
+ delta,
+ wireflags,
+ sidedata,
+ ) in deltas:
storeflags = 0
if wireflags & repository.REVISION_FLAG_CENSORED:
@@ -741,7 +757,7 @@
)
if duplicaterevisioncb:
- duplicaterevisioncb(self, node)
+ duplicaterevisioncb(self, self.rev(node))
empty = False
continue
@@ -752,7 +768,7 @@
text = None
storedelta = (deltabase, delta)
- self._addrawrevision(
+ rev = self._addrawrevision(
node,
text,
transaction,
@@ -764,7 +780,7 @@
)
if addrevisioncb:
- addrevisioncb(self, node)
+ addrevisioncb(self, rev)
empty = False
return not empty
@@ -897,6 +913,10 @@
def files(self):
return []
+ def sidedata(self, nodeorrev, _df=None):
+ # Not supported for now
+ return {}
+
def storageinfo(
self,
exclusivefiles=False,
@@ -1079,7 +1099,7 @@
self._revtonode[rev] = node
self._revisions[node] = entry
- return node
+ return rev
class sqliterepository(localrepo.localrepository):
diff -r bc268ea9f984 -r f67b8946bb1b hgext/transplant.py
--- a/hgext/transplant.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/transplant.py Tue Apr 20 11:01:06 2021 -0400
@@ -47,6 +47,7 @@
from mercurial.utils import (
procutil,
stringutil,
+ urlutil,
)
@@ -818,7 +819,8 @@
sourcerepo = opts.get(b'source')
if sourcerepo:
- peer = hg.peer(repo, opts, ui.expandpath(sourcerepo))
+ u = urlutil.get_unique_pull_path(b'transplant', repo, ui, sourcerepo)[0]
+ peer = hg.peer(repo, opts, u)
heads = pycompat.maplist(peer.lookup, opts.get(b'branch', ()))
target = set(heads)
for r in revs:
diff -r bc268ea9f984 -r f67b8946bb1b hgext/uncommit.py
--- a/hgext/uncommit.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/uncommit.py Tue Apr 20 11:01:06 2021 -0400
@@ -175,7 +175,7 @@
old = repo[b'.']
rewriteutil.precheck(repo, [old.rev()], b'uncommit')
if len(old.parents()) > 1:
- raise error.Abort(_(b"cannot uncommit merge changeset"))
+ raise error.InputError(_(b"cannot uncommit merge changeset"))
match = scmutil.match(old, pats, opts)
@@ -202,7 +202,7 @@
else:
hint = _(b"file does not exist")
- raise error.Abort(
+ raise error.InputError(
_(b'cannot uncommit "%s"') % scmutil.getuipathfn(repo)(f),
hint=hint,
)
@@ -280,7 +280,7 @@
markers = list(predecessormarkers(curctx))
if len(markers) != 1:
e = _(b"changeset must have one predecessor, found %i predecessors")
- raise error.Abort(e % len(markers))
+ raise error.InputError(e % len(markers))
prednode = markers[0].prednode()
predctx = unfi[prednode]
diff -r bc268ea9f984 -r f67b8946bb1b hgext/win32text.py
--- a/hgext/win32text.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/win32text.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# win32text.py - LF <-> CRLF/CR translation utilities for Windows/Mac users
#
-# Copyright 2005, 2007-2009 Matt Mackall and others
+# Copyright 2005, 2007-2009 Olivia Mackall and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b hgext/zeroconf/__init__.py
--- a/hgext/zeroconf/__init__.py Thu Mar 25 19:06:28 2021 -0400
+++ b/hgext/zeroconf/__init__.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# zeroconf.py - zeroconf support for Mercurial
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b i18n/da.po
--- a/i18n/da.po Thu Mar 25 19:06:28 2021 -0400
+++ b/i18n/da.po Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# Danish translations for Mercurial
# Danske oversættelser for Mercurial
-# Copyright (C) 2009, 2010 Matt Mackall and others
+# Copyright (C) 2009, 2010 Olivia Mackall and others
#
# Translation dictionary:
#
@@ -11359,11 +11359,11 @@
msgstr "(se http://mercurial.selenic.com for mere information)"
msgid ""
-"Copyright (C) 2005-2011 Matt Mackall and others\n"
+"Copyright (C) 2005-2011 Olivia Mackall and others\n"
"This is free software; see the source for copying conditions. There is NO\n"
"warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
msgstr ""
-"Copyright (C) 2005-2011 Matt Mackall og andre\n"
+"Copyright (C) 2005-2011 Olivia Mackall og andre\n"
"Dette er frit programmel; se kildekoden for kopieringsbetingelser. Der\n"
"gives INGEN GARANTI; ikke engang for SALGBARHED eller EGNETHED FOR\n"
"NOGET BESTEMT FORMÅL.\n"
diff -r bc268ea9f984 -r f67b8946bb1b i18n/de.po
--- a/i18n/de.po Thu Mar 25 19:06:28 2021 -0400
+++ b/i18n/de.po Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# German translations for Mercurial
# Deutsche Übersetzungen für Mercurial
-# Copyright (C) 2009 Matt Mackall and others
+# Copyright (C) 2009 Olivia Mackall and others
#
# Übersetzer:
# Tobias Bell
@@ -14536,11 +14536,11 @@
msgstr "(siehe http://mercurial.selenic.com für mehr Information)"
msgid ""
-"Copyright (C) 2005-2014 Matt Mackall and others\n"
+"Copyright (C) 2005-2014 Olivia Mackall and others\n"
"This is free software; see the source for copying conditions. There is NO\n"
"warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
msgstr ""
-"Copyright (C) 2005-2014 Matt Mackall und andere\n"
+"Copyright (C) 2005-2014 Olivia Mackall und andere\n"
"Dies ist freie Software; siehe Quellen für Kopierbestimmungen. Es besteht\n"
"KEINE Gewährleistung für das Programm, nicht einmal der Marktreife oder der\n"
"Verwendbarkeit für einen bestimmten Zweck.\n"
@@ -18893,7 +18893,7 @@
msgstr ""
msgid ""
-":Author: Matt Mackall \n"
+":Author: Olivia Mackall \n"
":Organization: Mercurial\n"
":Manual section: 1\n"
":Manual group: Mercurial Manual"
@@ -19032,7 +19032,7 @@
msgid ""
"Author\n"
"\"\"\"\"\"\"\n"
-"Written by Matt Mackall "
+"Written by Olivia Mackall "
msgstr ""
msgid ""
@@ -19050,7 +19050,7 @@
msgid ""
"Copying\n"
"\"\"\"\"\"\"\"\n"
-"Copyright (C) 2005-2014 Matt Mackall.\n"
+"Copyright (C) 2005-2014 Olivia Mackall.\n"
"Free use of this software is granted under the terms of the GNU General\n"
"Public License version 2 or any later version."
msgstr ""
@@ -19088,7 +19088,7 @@
"Vadim Gelfer "
msgstr ""
-msgid "Mercurial was written by Matt Mackall ."
+msgid "Mercurial was written by Olivia Mackall ."
msgstr ""
msgid ""
@@ -19101,7 +19101,7 @@
"Copying\n"
"=======\n"
"This manual page is copyright 2006 Vadim Gelfer.\n"
-"Mercurial is copyright 2005-2014 Matt Mackall.\n"
+"Mercurial is copyright 2005-2014 Olivia Mackall.\n"
"Free use of this software is granted under the terms of the GNU General\n"
"Public License version 2 or any later version."
msgstr ""
@@ -19307,7 +19307,7 @@
"Copying\n"
"=======\n"
"This manual page is copyright 2005 Bryan O'Sullivan.\n"
-"Mercurial is copyright 2005-2014 Matt Mackall.\n"
+"Mercurial is copyright 2005-2014 Olivia Mackall.\n"
"Free use of this software is granted under the terms of the GNU General\n"
"Public License version 2 or any later version."
msgstr ""
diff -r bc268ea9f984 -r f67b8946bb1b i18n/el.po
--- a/i18n/el.po Thu Mar 25 19:06:28 2021 -0400
+++ b/i18n/el.po Tue Apr 20 11:01:06 2021 -0400
@@ -1,7 +1,7 @@
# Greek translations for Mercurial
# Ελληνική μετάφραση των μηνυμάτων του Mercurial
#
-# Copyright (C) 2009 Matt Mackall και άλλοι
+# Copyright (C) 2009 Olivia Mackall και άλλοι
#
msgid ""
msgstr ""
@@ -7606,12 +7606,12 @@
msgid ""
"\n"
-"Copyright (C) 2005-2010 Matt Mackall and others\n"
+"Copyright (C) 2005-2010 Olivia Mackall and others\n"
"This is free software; see the source for copying conditions. There is NO\n"
"warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
msgstr ""
"\n"
-"Πνευματικά δικαιώματα (C) 2005-2009 Matt Mackall και άλλοι\n"
+"Πνευματικά δικαιώματα (C) 2005-2009 Olivia Mackall και άλλοι\n"
"Αυτό το πρόγραμμα είναι ελεύθερο λογισμικό· δείτε τον πηγαίο κώδικα για\n"
"την άδεια χρήσης του. Δεν παρέχεται ΚΑΜΙΑ εγγύηση· ούτε καν για την\n"
"ΕΜΠΟΡΕΥΣΙΜΟΤΗΤΑ ή την ΚΑΤΑΛΛΗΛΟΤΗΤΑ ΓΙΑ ΚΑΠΟΙΟ ΣΚΟΠΟ.\n"
diff -r bc268ea9f984 -r f67b8946bb1b i18n/fr.po
--- a/i18n/fr.po Thu Mar 25 19:06:28 2021 -0400
+++ b/i18n/fr.po Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# French translations for Mercurial
# Traductions françaises de Mercurial
-# Copyright (C) 2009 Matt Mackall and others
+# Copyright (C) 2009 Olivia Mackall and others
#
# Quelques règles :
# - dans l'aide d'une commande, la première ligne descriptive
@@ -9412,7 +9412,7 @@
msgid ""
"\n"
-"Copyright (C) 2005-2010 Matt Mackall and others\n"
+"Copyright (C) 2005-2010 Olivia Mackall and others\n"
"This is free software; see the source for copying conditions. There is NO\n"
"warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
msgstr ""
diff -r bc268ea9f984 -r f67b8946bb1b i18n/hggettext
--- a/i18n/hggettext Thu Mar 25 19:06:28 2021 -0400
+++ b/i18n/hggettext Tue Apr 20 11:01:06 2021 -0400
@@ -2,7 +2,7 @@
#
# hggettext - carefully extract docstrings for Mercurial
#
-# Copyright 2009 Matt Mackall and others
+# Copyright 2009 Olivia Mackall and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b i18n/it.po
--- a/i18n/it.po Thu Mar 25 19:06:28 2021 -0400
+++ b/i18n/it.po Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# Italian translations for Mercurial
# Traduzione italiana per Mercurial
-# Copyright (C) 2009 Matt Mackall and others
+# Copyright (C) 2009 Olivia Mackall and others
msgid ""
msgstr ""
"Project-Id-Version: Mercurial\n"
@@ -8881,11 +8881,11 @@
msgstr ""
msgid ""
-"Copyright (C) 2005-2011 Matt Mackall and others\n"
+"Copyright (C) 2005-2011 Olivia Mackall and others\n"
"This is free software; see the source for copying conditions. There is NO\n"
"warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
msgstr ""
-"Copyright (C) 2005-2011 Matt Mackall e altri\n"
+"Copyright (C) 2005-2011 Olivia Mackall e altri\n"
"Questo è software libero; vedere i sorgenti per le condizioni di copia.\n"
"Non c'è ALCUNA garanzia; neppure di COMMERCIABILITÀ o IDONEITÀ AD UNO\n"
"SCOPO PARTICOLARE.\n"
diff -r bc268ea9f984 -r f67b8946bb1b i18n/ja.po
--- a/i18n/ja.po Thu Mar 25 19:06:28 2021 -0400
+++ b/i18n/ja.po Tue Apr 20 11:01:06 2021 -0400
@@ -18771,11 +18771,11 @@
msgstr "(詳細は https://mercurial-scm.org を参照)"
msgid ""
-"Copyright (C) 2005-2018 Matt Mackall and others\n"
+"Copyright (C) 2005-2018 Olivia Mackall and others\n"
"This is free software; see the source for copying conditions. There is NO\n"
"warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
msgstr ""
-"Copyright (C) 2005-2018 Matt Mackall and others\n"
+"Copyright (C) 2005-2018 Olivia Mackall and others\n"
"本製品はフリーソフトウェアです。\n"
"頒布条件に関しては同梱されるライセンス条項をお読みください。\n"
"市場適合性や特定用途への可否を含め、 本製品は無保証です。\n"
@@ -30239,11 +30239,11 @@
msgid ""
"Author\n"
"\"\"\"\"\"\"\n"
-"Written by Matt Mackall "
+"Written by Olivia Mackall "
msgstr ""
"著者\n"
"\"\"\"\"\n"
-"Matt Mackall "
+"Olivia Mackall "
msgid ""
"Resources\n"
@@ -30264,13 +30264,13 @@
msgid ""
"Copying\n"
"\"\"\"\"\"\"\"\n"
-"Copyright (C) 2005-2016 Matt Mackall.\n"
+"Copyright (C) 2005-2016 Olivia Mackall.\n"
"Free use of this software is granted under the terms of the GNU General\n"
"Public License version 2 or any later version."
msgstr ""
"Copying\n"
"\"\"\"\"\"\"\"\n"
-"Copyright (C) 2005-2016 Matt Mackall.\n"
+"Copyright (C) 2005-2016 Olivia Mackall.\n"
"本ソフトウェアは、 バージョン2またはそれ以降の GNU General\n"
"Public License の元での自由な利用が保証されています。"
@@ -30293,12 +30293,12 @@
"----------------------------------"
msgid ""
-":Author: Matt Mackall \n"
+":Author: Olivia Mackall \n"
":Organization: Mercurial\n"
":Manual section: 1\n"
":Manual group: Mercurial Manual"
msgstr ""
-":Author: Matt Mackall \n"
+":Author: Olivia Mackall \n"
":Organization: Mercurial\n"
":Manual section: 1\n"
":Manual group: Mercurial Manual"
@@ -30471,13 +30471,13 @@
msgid ""
"Copying\n"
"\"\"\"\"\"\"\"\n"
-"Copyright (C) 2005-2018 Matt Mackall.\n"
+"Copyright (C) 2005-2018 Olivia Mackall.\n"
"Free use of this software is granted under the terms of the GNU General\n"
"Public License version 2 or any later version."
msgstr ""
"Copying\n"
"\"\"\"\"\"\"\"\n"
-"Copyright (C) 2005-2016 Matt Mackall.\n"
+"Copyright (C) 2005-2016 Olivia Mackall.\n"
"本ソフトウェアは、 バージョン2またはそれ以降の GNU General\n"
"Public License の元での自由な利用が保証されています。"
@@ -30519,8 +30519,8 @@
"====\n"
"本マニュアルページの著者は Vadim Gelfer です。"
-msgid "Mercurial was written by Matt Mackall ."
-msgstr "Mercurial の著者は Matt Mackall です。"
+msgid "Mercurial was written by Olivia Mackall ."
+msgstr "Mercurial の著者は Olivia Mackall です。"
msgid ""
"See Also\n"
@@ -30536,14 +30536,14 @@
"Copying\n"
"=======\n"
"This manual page is copyright 2006 Vadim Gelfer.\n"
-"Mercurial is copyright 2005-2018 Matt Mackall.\n"
+"Mercurial is copyright 2005-2018 Olivia Mackall.\n"
"Free use of this software is granted under the terms of the GNU General\n"
"Public License version 2 or any later version."
msgstr ""
"Copying\n"
"=======\n"
"本マニュアルページの著作権は copyright 2006 Vadim Gelfer です。\n"
-"Mercurial の著作権は copyright 2005-2017 Matt Mackall です。\n"
+"Mercurial の著作権は copyright 2005-2017 Olivia Mackall です。\n"
"本ソフトウェアは、 バージョン2またはそれ以降の GNU General\n"
"Public License の元での自由な利用が保証されています。"
@@ -30825,14 +30825,14 @@
"Copying\n"
"=======\n"
"This manual page is copyright 2005 Bryan O'Sullivan.\n"
-"Mercurial is copyright 2005-2018 Matt Mackall.\n"
+"Mercurial is copyright 2005-2018 Olivia Mackall.\n"
"Free use of this software is granted under the terms of the GNU General\n"
"Public License version 2 or any later version."
msgstr ""
"Copying\n"
"=======\n"
"本マニュアルの著作権は copyright 2005 Bryan O'Sullivan です。\n"
-"Mercurial の著作権は copyright 2005-2017 Matt Mackall です。\n"
+"Mercurial の著作権は copyright 2005-2017 Olivia Mackall です。\n"
"本ソフトウェアは、 バージョン2またはそれ以降の GNU General\n"
"Public License の元での自由な利用が保証されています。"
@@ -39790,13 +39790,13 @@
#~ msgid ""
#~ "Copying\n"
#~ "\"\"\"\"\"\"\"\n"
-#~ "Copyright (C) 2005-2017 Matt Mackall.\n"
+#~ "Copyright (C) 2005-2017 Olivia Mackall.\n"
#~ "Free use of this software is granted under the terms of the GNU General\n"
#~ "Public License version 2 or any later version."
#~ msgstr ""
#~ "Copying\n"
#~ "\"\"\"\"\"\"\"\n"
-#~ "Copyright (C) 2005-2017 Matt Mackall.\n"
+#~ "Copyright (C) 2005-2017 Olivia Mackall.\n"
#~ "本ソフトウェアは、 バージョン2またはそれ以降の GNU General\n"
#~ "Public License の元での自由な利用が保証されています。"
diff -r bc268ea9f984 -r f67b8946bb1b i18n/pt_BR.po
--- a/i18n/pt_BR.po Thu Mar 25 19:06:28 2021 -0400
+++ b/i18n/pt_BR.po Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# Brazilian Portuguese translations for Mercurial
# Traduções do Mercurial para português do Brasil
-# Copyright (C) 2011 Matt Mackall and others
+# Copyright (C) 2011 Olivia Mackall and others
#
# Translators:
# Diego Oliveira
@@ -19269,11 +19269,11 @@
msgstr "(veja https://mercurial-scm.org para mais informações)"
msgid ""
-"Copyright (C) 2005-2018 Matt Mackall and others\n"
+"Copyright (C) 2005-2018 Olivia Mackall and others\n"
"This is free software; see the source for copying conditions. There is NO\n"
"warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
msgstr ""
-"Copyright (C) 2005-2018 Matt Mackall e outros\n"
+"Copyright (C) 2005-2018 Olivia Mackall e outros\n"
"Este software é livre; veja os fontes para condições de cópia. Não\n"
"há garantias, nem mesmo de adequação para qualquer propósito em\n"
"particular.\n"
@@ -31340,11 +31340,11 @@
msgid ""
"Author\n"
"\"\"\"\"\"\"\n"
-"Written by Matt Mackall "
+"Written by Olivia Mackall "
msgstr ""
"Autor\n"
"\"\"\"\"\"\n"
-"Escrito por Matt Mackall "
+"Escrito por Olivia Mackall "
msgid ""
"Resources\n"
@@ -31367,13 +31367,13 @@
msgid ""
"Copying\n"
"\"\"\"\"\"\"\"\n"
-"Copyright (C) 2005-2016 Matt Mackall.\n"
+"Copyright (C) 2005-2016 Olivia Mackall.\n"
"Free use of this software is granted under the terms of the GNU General\n"
"Public License version 2 or any later version."
msgstr ""
"Cópia\n"
"\"\"\"\"\"\n"
-"Copyright (C) 2005-2016 Matt Mackall.\n"
+"Copyright (C) 2005-2016 Olivia Mackall.\n"
"Garante-se livre uso deste software nos termos da licença\n"
"GNU General Public License, versão 2 ou qualquer versão posterior."
@@ -31396,12 +31396,12 @@
"----------------------------------------------------"
msgid ""
-":Author: Matt Mackall \n"
+":Author: Olivia Mackall \n"
":Organization: Mercurial\n"
":Manual section: 1\n"
":Manual group: Mercurial Manual"
msgstr ""
-":Author: Matt Mackall \n"
+":Author: Olivia Mackall \n"
":Organization: Mercurial\n"
":Manual section: 1\n"
":Manual group: Mercurial Manual"
@@ -31581,13 +31581,13 @@
msgid ""
"Copying\n"
"\"\"\"\"\"\"\"\n"
-"Copyright (C) 2005-2018 Matt Mackall.\n"
+"Copyright (C) 2005-2018 Olivia Mackall.\n"
"Free use of this software is granted under the terms of the GNU General\n"
"Public License version 2 or any later version."
msgstr ""
"Cópia\n"
"\"\"\"\"\"\n"
-"Copyright (C) 2005-2018 Matt Mackall.\n"
+"Copyright (C) 2005-2018 Olivia Mackall.\n"
"Garante-se livre uso deste software nos termos da licença\n"
"GNU General Public License, versão 2 ou qualquer versão posterior."
@@ -31629,8 +31629,8 @@
"=====\n"
"Vadim Gelfer "
-msgid "Mercurial was written by Matt Mackall ."
-msgstr "Mercurial foi escrito por Matt Mackall ."
+msgid "Mercurial was written by Olivia Mackall ."
+msgstr "Mercurial foi escrito por Olivia Mackall ."
msgid ""
"See Also\n"
@@ -31645,14 +31645,14 @@
"Copying\n"
"=======\n"
"This manual page is copyright 2006 Vadim Gelfer.\n"
-"Mercurial is copyright 2005-2018 Matt Mackall.\n"
+"Mercurial is copyright 2005-2018 Olivia Mackall.\n"
"Free use of this software is granted under the terms of the GNU General\n"
"Public License version 2 or any later version."
msgstr ""
"Cópia\n"
"=====\n"
"Esta página de manual: copyright 2006 Vadim Gelfer.\n"
-"Mercurial: copyright 2005-2018 Matt Mackall.\n"
+"Mercurial: copyright 2005-2018 Olivia Mackall.\n"
"Garante-se livre uso deste software nos termos da licença\n"
"GNU General Public License, versão 2 ou qualquer versão posterior."
@@ -31928,14 +31928,14 @@
"Copying\n"
"=======\n"
"This manual page is copyright 2005 Bryan O'Sullivan.\n"
-"Mercurial is copyright 2005-2018 Matt Mackall.\n"
+"Mercurial is copyright 2005-2018 Olivia Mackall.\n"
"Free use of this software is granted under the terms of the GNU General\n"
"Public License version 2 or any later version."
msgstr ""
"Cópia\n"
"=====\n"
"Esta página de manual: copyright 2005 Bryan O'Sullivan.\n"
-"Mercurial: copyright 2005-2018 Matt Mackall.\n"
+"Mercurial: copyright 2005-2018 Olivia Mackall.\n"
"Garante-se livre uso deste software nos termos da licença\n"
"GNU General Public License, versão 2 ou qualquer versão posterior."
@@ -41308,13 +41308,13 @@
#~ msgid ""
#~ "Copying\n"
#~ "\"\"\"\"\"\"\"\n"
-#~ "Copyright (C) 2005-2017 Matt Mackall.\n"
+#~ "Copyright (C) 2005-2017 Olivia Mackall.\n"
#~ "Free use of this software is granted under the terms of the GNU General\n"
#~ "Public License version 2 or any later version."
#~ msgstr ""
#~ "Cópia\n"
#~ "\"\"\"\"\"\n"
-#~ "Copyright (C) 2005-2017 Matt Mackall.\n"
+#~ "Copyright (C) 2005-2017 Olivia Mackall.\n"
#~ "Garante-se livre uso deste software nos termos da licença\n"
#~ "GNU General Public License, versão 2 ou qualquer versão posterior."
diff -r bc268ea9f984 -r f67b8946bb1b i18n/ro.po
--- a/i18n/ro.po Thu Mar 25 19:06:28 2021 -0400
+++ b/i18n/ro.po Tue Apr 20 11:01:06 2021 -0400
@@ -1,7 +1,7 @@
# Romanian translation for Mercurial
# Traducerea în limba română pentru Mercurial
#
-# Copyright (C) 2010 Matt Mackall and others
+# Copyright (C) 2010 Olivia Mackall and others
#
#
# Glosar de traduceri
@@ -10032,11 +10032,11 @@
msgstr "(vezi http://mercurial.selenic.com pentru mai multe informații)"
msgid ""
-"Copyright (C) 2005-2011 Matt Mackall and others\n"
+"Copyright (C) 2005-2011 Olivia Mackall and others\n"
"This is free software; see the source for copying conditions. There is NO\n"
"warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
msgstr ""
-"Copyright (C) 2005-2011 Matt Mackall și alții\n"
+"Copyright (C) 2005-2011 Olivia Mackall și alții\n"
"Acesta este software liber; vezi sursa pentru condițiile de copiere.\n"
"Nu există NICIO garanție; nici măcar pentru COMERCIALIZARE sau\n"
"COMPATIBILITATE ÎN ANUMITE SCOPURI.\n"
diff -r bc268ea9f984 -r f67b8946bb1b i18n/ru.po
--- a/i18n/ru.po Thu Mar 25 19:06:28 2021 -0400
+++ b/i18n/ru.po Tue Apr 20 11:01:06 2021 -0400
@@ -1,5 +1,5 @@
# Russian translations for Mercurial package.
-# Copyright (C) 2011 Matt Mackall and others
+# Copyright (C) 2011 Olivia Mackall and others
# This file is distributed under the same license as the Mercurial package.
# === Glossary ===
#
@@ -15590,11 +15590,11 @@
msgstr "(подробнее см. http://mercurial.selenic.com)"
msgid ""
-"Copyright (C) 2005-2014 Matt Mackall and others\n"
+"Copyright (C) 2005-2014 Olivia Mackall and others\n"
"This is free software; see the source for copying conditions. There is NO\n"
"warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
msgstr ""
-"(С) 2005-2014 Matt Mackall и другие.\n"
+"(С) 2005-2014 Olivia Mackall и другие.\n"
"Это свободное ПО; условия распространения см. в исходном коде.\n"
"НИКАКИХ ГАРАНТИЙ НЕ ПРЕДОСТАВЛЯЕТСЯ, в том числе на пригодность для\n"
"коммерческого использования и для решения конкретных задач.\n"
@@ -21807,7 +21807,7 @@
# NOT SURE should this be translated?
msgid ""
-":Author: Matt Mackall \n"
+":Author: Olivia Mackall \n"
":Organization: Mercurial\n"
":Manual section: 1\n"
":Manual group: Mercurial Manual"
@@ -22000,11 +22000,11 @@
msgid ""
"Author\n"
"\"\"\"\"\"\"\n"
-"Written by Matt Mackall "
+"Written by Olivia Mackall "
msgstr ""
"Автор\n"
"\"\"\"\"\"\n"
-"Matt Mackall "
+"Olivia Mackall "
msgid ""
"Resources\n"
@@ -22024,13 +22024,13 @@
msgid ""
"Copying\n"
"\"\"\"\"\"\"\"\n"
-"Copyright (C) 2005-2014 Matt Mackall.\n"
+"Copyright (C) 2005-2014 Olivia Mackall.\n"
"Free use of this software is granted under the terms of the GNU General\n"
"Public License version 2 or any later version."
msgstr ""
"Копирование\n"
"\"\"\"\"\"\"\"\"\"\"\"\n"
-"(C) 2005-2014 Matt Mackall.\n"
+"(C) 2005-2014 Olivia Mackall.\n"
"Свободное использование этого ПО возможно в соответствии с \n"
"Универсальной Общественной Лицензией GNU (GNU GPL) версии 2 или выше."
@@ -22068,8 +22068,8 @@
"=====\n"
"Vadim Gelfer "
-msgid "Mercurial was written by Matt Mackall ."
-msgstr "Mercurial написан Matt Mackall ."
+msgid "Mercurial was written by Olivia Mackall ."
+msgstr "Mercurial написан Olivia Mackall ."
msgid ""
"See Also\n"
@@ -22084,14 +22084,14 @@
"Copying\n"
"=======\n"
"This manual page is copyright 2006 Vadim Gelfer.\n"
-"Mercurial is copyright 2005-2014 Matt Mackall.\n"
+"Mercurial is copyright 2005-2014 Olivia Mackall.\n"
"Free use of this software is granted under the terms of the GNU General\n"
"Public License version 2 or any later version."
msgstr ""
"Копирование\n"
"===========\n"
"Правами на данную страницу обладает (с) 2006 Vadim Gelfer\n"
-"Права на Mercurial принадлежат (с) 2005-2014 Matt Mackall.\n"
+"Права на Mercurial принадлежат (с) 2005-2014 Olivia Mackall.\n"
"Свободное использование этого ПО возможно в соответствии с \n"
"Универсальной Общественной Лицензией GNU (GNU GPL) версии 2 или выше."
@@ -22346,14 +22346,14 @@
"Copying\n"
"=======\n"
"This manual page is copyright 2005 Bryan O'Sullivan.\n"
-"Mercurial is copyright 2005-2014 Matt Mackall.\n"
+"Mercurial is copyright 2005-2014 Olivia Mackall.\n"
"Free use of this software is granted under the terms of the GNU General\n"
"Public License version 2 or any later version."
msgstr ""
"Копирование\n"
"===========\n"
"Правами на данную страницу обладает (с) 2005 Bryan O'Sullivan\n"
-"Права на Mercurial принадлежат (с) 2005-2014 Matt Mackall.\n"
+"Права на Mercurial принадлежат (с) 2005-2014 Olivia Mackall.\n"
"Свободное использование этого ПО возможно в соответствии с \n"
"Универсальной Общественной Лицензией GNU (GNU GPL) версии 2 или выше."
diff -r bc268ea9f984 -r f67b8946bb1b i18n/sv.po
--- a/i18n/sv.po Thu Mar 25 19:06:28 2021 -0400
+++ b/i18n/sv.po Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# Swedish translation for Mercurial
# Svensk översättning för Mercurial
-# Copyright (C) 2009-2012 Matt Mackall and others
+# Copyright (C) 2009-2012 Olivia Mackall and others
#
# Translation dictionary:
#
@@ -12413,11 +12413,11 @@
msgstr "(se http://mercurial.selenic.com för mer information)"
msgid ""
-"Copyright (C) 2005-2012 Matt Mackall and others\n"
+"Copyright (C) 2005-2012 Olivia Mackall and others\n"
"This is free software; see the source for copying conditions. There is NO\n"
"warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
msgstr ""
-"Copyright (C) 2005-2012 Matt Mackall och andra\n"
+"Copyright (C) 2005-2012 Olivia Mackall och andra\n"
"Detta är fri mjukvara; se källkoden för kopieringsvillkor. Det ges INGEN\n"
"garanti; inte ens för SÄLJBARHET eller ATT PASSA FÖR ETT VISST ÄNDAMÅL.\n"
diff -r bc268ea9f984 -r f67b8946bb1b i18n/zh_CN.po
--- a/i18n/zh_CN.po Thu Mar 25 19:06:28 2021 -0400
+++ b/i18n/zh_CN.po Tue Apr 20 11:01:06 2021 -0400
@@ -7409,12 +7409,12 @@
msgid ""
"\n"
-"Copyright (C) 2005-2010 Matt Mackall and others\n"
+"Copyright (C) 2005-2010 Olivia Mackall and others\n"
"This is free software; see the source for copying conditions. There is NO\n"
"warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
msgstr ""
"\n"
-"版权所有 (C) 2005-2010 Matt Mackall 和其他人。\n"
+"版权所有 (C) 2005-2010 Olivia Mackall 和其他人。\n"
"这是自由软件,具体参见版权条款。这里没有任何担保,甚至没有适合\n"
"特定目的的隐含的担保。\n"
diff -r bc268ea9f984 -r f67b8946bb1b i18n/zh_TW.po
--- a/i18n/zh_TW.po Thu Mar 25 19:06:28 2021 -0400
+++ b/i18n/zh_TW.po Tue Apr 20 11:01:06 2021 -0400
@@ -1,5 +1,5 @@
# Traditional Chinese translation for Mercurial
-# Copyright (C) 2009 Matt Mackall and others
+# Copyright (C) 2009 Olivia Mackall and others
# This file is distributed under the same license as the Mercurial package.
# Chia-Huan Wu , 2009.
#
@@ -8191,7 +8191,7 @@
msgstr "\tSee 'hg help urls' for more information."
msgid ""
-"Copyright (C) 2005-2010 Matt Mackall and others\n"
+"Copyright (C) 2005-2010 Olivia Mackall and others\n"
"This is free software; see the source for copying conditions. There is NO\n"
"warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
msgstr ""
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/ancestor.py
--- a/mercurial/ancestor.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/ancestor.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# ancestor.py - generic DAG ancestor algorithm for mercurial
#
-# Copyright 2006 Matt Mackall
+# Copyright 2006 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/bdiff.c
--- a/mercurial/bdiff.c Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/bdiff.c Tue Apr 20 11:01:06 2021 -0400
@@ -1,7 +1,7 @@
/*
bdiff.c - efficient binary diff extension for Mercurial
- Copyright 2005, 2006 Matt Mackall
+ Copyright 2005, 2006 Olivia Mackall
This software may be used and distributed according to the terms of
the GNU General Public License, incorporated herein by reference.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/bitmanipulation.h
--- a/mercurial/bitmanipulation.h Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/bitmanipulation.h Tue Apr 20 11:01:06 2021 -0400
@@ -5,6 +5,18 @@
#include "compat.h"
+/* Reads a 64 bit integer from big-endian bytes. Assumes that the data is long
+ enough */
+static inline uint64_t getbe64(const char *c)
+{
+ const unsigned char *d = (const unsigned char *)c;
+
+ return ((((uint64_t)d[0]) << 56) | (((uint64_t)d[1]) << 48) |
+ (((uint64_t)d[2]) << 40) | (((uint64_t)d[3]) << 32) |
+ (((uint64_t)d[4]) << 24) | (((uint64_t)d[5]) << 16) |
+ (((uint64_t)d[6]) << 8) | (d[7]));
+}
+
static inline uint32_t getbe32(const char *c)
{
const unsigned char *d = (const unsigned char *)c;
@@ -27,6 +39,20 @@
return ((d[0] << 8) | (d[1]));
}
+/* Writes a 64 bit integer to bytes in a big-endian format.
+ Assumes that the buffer is long enough */
+static inline void putbe64(uint64_t x, char *c)
+{
+ c[0] = (x >> 56) & 0xff;
+ c[1] = (x >> 48) & 0xff;
+ c[2] = (x >> 40) & 0xff;
+ c[3] = (x >> 32) & 0xff;
+ c[4] = (x >> 24) & 0xff;
+ c[5] = (x >> 16) & 0xff;
+ c[6] = (x >> 8) & 0xff;
+ c[7] = (x)&0xff;
+}
+
static inline void putbe32(uint32_t x, char *c)
{
c[0] = (x >> 24) & 0xff;
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/bookmarks.py
--- a/mercurial/bookmarks.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/bookmarks.py Tue Apr 20 11:01:06 2021 -0400
@@ -27,6 +27,9 @@
txnutil,
util,
)
+from .utils import (
+ urlutil,
+)
# label constants
# until 3.5, bookmarks.current was the advertised name, not
@@ -597,10 +600,10 @@
# try to use an @pathalias suffix
# if an @pathalias already exists, we overwrite (update) it
if path.startswith(b"file:"):
- path = util.url(path).path
+ path = urlutil.url(path).path
for p, u in ui.configitems(b"paths"):
if u.startswith(b"file:"):
- u = util.url(u).path
+ u = urlutil.url(u).path
if path == u:
return b'%s@%s' % (b, p)
@@ -623,7 +626,7 @@
_binaryentry = struct.Struct(b'>20sH')
-def binaryencode(bookmarks):
+def binaryencode(repo, bookmarks):
"""encode a '(bookmark, node)' iterable into a binary stream
the binary format is:
@@ -645,7 +648,7 @@
return b''.join(binarydata)
-def binarydecode(stream):
+def binarydecode(repo, stream):
"""decode a binary stream into an '(bookmark, node)' iterable
the binary format is:
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/branchmap.py
--- a/mercurial/branchmap.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/branchmap.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# branchmap.py - logic to computes, maintain and stores branchmap for local repo
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -39,6 +39,7 @@
Tuple,
Union,
)
+ from . import localrepo
assert any(
(
@@ -51,6 +52,7 @@
Set,
Tuple,
Union,
+ localrepo,
)
)
@@ -97,7 +99,7 @@
revs.extend(r for r in extrarevs if r <= bcache.tiprev)
else:
# nothing to fall back on, start empty.
- bcache = branchcache()
+ bcache = branchcache(repo)
revs.extend(cl.revs(start=bcache.tiprev + 1))
if revs:
@@ -129,6 +131,7 @@
if rbheads:
rtiprev = max((int(clrev(node)) for node in rbheads))
cache = branchcache(
+ repo,
remotebranchmap,
repo[rtiprev].node(),
rtiprev,
@@ -184,6 +187,7 @@
def __init__(
self,
+ repo,
entries=(),
tipnode=nullid,
tiprev=nullrev,
@@ -191,10 +195,11 @@
closednodes=None,
hasnode=None,
):
- # type: (Union[Dict[bytes, List[bytes]], Iterable[Tuple[bytes, List[bytes]]]], bytes, int, Optional[bytes], Optional[Set[bytes]], Optional[Callable[[bytes], bool]]) -> None
+ # type: (localrepo.localrepository, Union[Dict[bytes, List[bytes]], Iterable[Tuple[bytes, List[bytes]]]], bytes, int, Optional[bytes], Optional[Set[bytes]], Optional[Callable[[bytes], bool]]) -> None
"""hasnode is a function which can be used to verify whether changelog
has a given node or not. If it's not provided, we assume that every node
we have exists in changelog"""
+ self._repo = repo
self.tipnode = tipnode
self.tiprev = tiprev
self.filteredhash = filteredhash
@@ -280,6 +285,7 @@
if len(cachekey) > 2:
filteredhash = bin(cachekey[2])
bcache = cls(
+ repo,
tipnode=last,
tiprev=lrev,
filteredhash=filteredhash,
@@ -386,6 +392,7 @@
def copy(self):
"""return an deep copy of the branchcache object"""
return type(self)(
+ self._repo,
self._entries,
self.tipnode,
self.tiprev,
@@ -564,6 +571,7 @@
# [4 byte hash prefix][4 byte branch name number with sign bit indicating open]
_rbcrecfmt = b'>4sI'
_rbcrecsize = calcsize(_rbcrecfmt)
+_rbcmininc = 64 * _rbcrecsize
_rbcnodelen = 4
_rbcbranchidxmask = 0x7FFFFFFF
_rbccloseflag = 0x80000000
@@ -703,8 +711,10 @@
self._setcachedata(rev, reponode, branchidx)
return b, close
- def setdata(self, branch, rev, node, close):
+ def setdata(self, rev, changelogrevision):
"""add new data information to the cache"""
+ branch, close = changelogrevision.branchinfo
+
if branch in self._namesreverse:
branchidx = self._namesreverse[branch]
else:
@@ -713,7 +723,7 @@
self._namesreverse[branch] = branchidx
if close:
branchidx |= _rbccloseflag
- self._setcachedata(rev, node, branchidx)
+ self._setcachedata(rev, self._repo.changelog.node(rev), branchidx)
# If no cache data were readable (non exists, bad permission, etc)
# the cache was bypassing itself by setting:
#
@@ -728,11 +738,15 @@
if rev == nullrev:
return
rbcrevidx = rev * _rbcrecsize
- if len(self._rbcrevs) < rbcrevidx + _rbcrecsize:
- self._rbcrevs.extend(
- b'\0'
- * (len(self._repo.changelog) * _rbcrecsize - len(self._rbcrevs))
- )
+ requiredsize = rbcrevidx + _rbcrecsize
+ rbccur = len(self._rbcrevs)
+ if rbccur < requiredsize:
+ # bytearray doesn't allocate extra space at least in Python 3.7.
+ # When multiple changesets are added in a row, precise resize would
+ # result in quadratic complexity. Overallocate to compensate by
+ # use the classic doubling technique for dynamic arrays instead.
+ # If there was a gap in the map before, less space will be reserved.
+ self._rbcrevs.extend(b'\0' * max(_rbcmininc, requiredsize))
pack_into(_rbcrecfmt, self._rbcrevs, rbcrevidx, node, branchidx)
self._rbcrevslen = min(self._rbcrevslen, rev)
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/bundle2.py
--- a/mercurial/bundle2.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/bundle2.py Tue Apr 20 11:01:06 2021 -0400
@@ -177,7 +177,10 @@
url,
util,
)
-from .utils import stringutil
+from .utils import (
+ stringutil,
+ urlutil,
+)
urlerr = util.urlerr
urlreq = util.urlreq
@@ -1598,7 +1601,6 @@
b'digests': tuple(sorted(util.DIGESTS.keys())),
b'remote-changegroup': (b'http', b'https'),
b'hgtagsfnodes': (),
- b'rev-branch-cache': (),
b'phases': (b'heads',),
b'stream': (b'v2',),
}
@@ -1643,6 +1645,9 @@
# Else always advertise support on client, because payload support
# should always be advertised.
+ # b'rev-branch-cache is no longer advertised, but still supported
+ # for legacy clients.
+
return caps
@@ -1769,7 +1774,7 @@
for node in outgoing.ancestorsof:
# Don't compute missing, as this may slow down serving.
fnode = cache.getfnode(node, computemissing=False)
- if fnode is not None:
+ if fnode:
chunks.extend([node, fnode])
if chunks:
@@ -1810,6 +1815,28 @@
return params
+def format_remote_wanted_sidedata(repo):
+ """Formats a repo's wanted sidedata categories into a bytestring for
+ capabilities exchange."""
+ wanted = b""
+ if repo._wanted_sidedata:
+ wanted = b','.join(
+ pycompat.bytestr(c) for c in sorted(repo._wanted_sidedata)
+ )
+ return wanted
+
+
+def read_remote_wanted_sidedata(remote):
+ sidedata_categories = remote.capable(b'exp-wanted-sidedata')
+ return read_wanted_sidedata(sidedata_categories)
+
+
+def read_wanted_sidedata(formatted):
+ if formatted:
+ return set(formatted.split(b','))
+ return set()
+
+
def addpartbundlestream2(bundler, repo, **kwargs):
if not kwargs.get('stream', False):
return
@@ -1955,6 +1982,7 @@
b'version',
b'nbchanges',
b'exp-sidedata',
+ b'exp-wanted-sidedata',
b'treemanifest',
b'targetphase',
),
@@ -1997,11 +2025,15 @@
targetphase = inpart.params.get(b'targetphase')
if targetphase is not None:
extrakwargs['targetphase'] = int(targetphase)
+
+ remote_sidedata = inpart.params.get(b'exp-wanted-sidedata')
+ extrakwargs['sidedata_categories'] = read_wanted_sidedata(remote_sidedata)
+
ret = _processchangegroup(
op,
cg,
tr,
- b'bundle2',
+ op.source,
b'bundle2',
expectedtotal=nbchangesets,
**extrakwargs
@@ -2044,7 +2076,7 @@
raw_url = inpart.params[b'url']
except KeyError:
raise error.Abort(_(b'remote-changegroup: missing "%s" param') % b'url')
- parsed_url = util.url(raw_url)
+ parsed_url = urlutil.url(raw_url)
if parsed_url.scheme not in capabilities[b'remote-changegroup']:
raise error.Abort(
_(b'remote-changegroup does not support %s urls')
@@ -2081,9 +2113,9 @@
cg = exchange.readbundle(op.repo.ui, real_part, raw_url)
if not isinstance(cg, changegroup.cg1unpacker):
raise error.Abort(
- _(b'%s: not a bundle version 1.0') % util.hidepassword(raw_url)
+ _(b'%s: not a bundle version 1.0') % urlutil.hidepassword(raw_url)
)
- ret = _processchangegroup(op, cg, tr, b'bundle2', b'bundle2')
+ ret = _processchangegroup(op, cg, tr, op.source, b'bundle2')
if op.reply is not None:
# This is definitely not the final form of this
# return. But one need to start somewhere.
@@ -2097,7 +2129,7 @@
except error.Abort as e:
raise error.Abort(
_(b'bundle at %s is corrupted:\n%s')
- % (util.hidepassword(raw_url), e.message)
+ % (urlutil.hidepassword(raw_url), e.message)
)
assert not inpart.read()
@@ -2117,7 +2149,7 @@
contains binary encoded (bookmark, node) tuple. If the local state does
not marks the one in the part, a PushRaced exception is raised
"""
- bookdata = bookmarks.binarydecode(inpart)
+ bookdata = bookmarks.binarydecode(op.repo, inpart)
msgstandard = (
b'remote repository changed while pushing - please try again '
@@ -2347,7 +2379,7 @@
When mode is 'records', the information is recorded into the 'bookmarks'
records of the bundle operation. This behavior is suitable for pulling.
"""
- changes = bookmarks.binarydecode(inpart)
+ changes = bookmarks.binarydecode(op.repo, inpart)
pushkeycompat = op.repo.ui.configbool(
b'server', b'bookmarks-pushkey-compat'
@@ -2478,35 +2510,10 @@
@parthandler(b'cache:rev-branch-cache')
def handlerbc(op, inpart):
- """receive a rev-branch-cache payload and update the local cache
-
- The payload is a series of data related to each branch
-
- 1) branch name length
- 2) number of open heads
- 3) number of closed heads
- 4) open heads nodes
- 5) closed heads nodes
- """
- total = 0
- rawheader = inpart.read(rbcstruct.size)
- cache = op.repo.revbranchcache()
- cl = op.repo.unfiltered().changelog
- while rawheader:
- header = rbcstruct.unpack(rawheader)
- total += header[1] + header[2]
- utf8branch = inpart.read(header[0])
- branch = encoding.tolocal(utf8branch)
- for x in pycompat.xrange(header[1]):
- node = inpart.read(20)
- rev = cl.rev(node)
- cache.setdata(branch, rev, node, False)
- for x in pycompat.xrange(header[2]):
- node = inpart.read(20)
- rev = cl.rev(node)
- cache.setdata(branch, rev, node, True)
- rawheader = inpart.read(rbcstruct.size)
- cache.write()
+ """Legacy part, ignored for compatibility with bundles from or
+ for Mercurial before 5.7. Newer Mercurial computes the cache
+ efficiently enough during unbundling that the additional transfer
+ is unnecessary."""
@parthandler(b'pushvars')
@@ -2561,8 +2568,6 @@
for r in repo.revs(b"::%ln", common):
commonnodes.add(cl.node(r))
if commonnodes:
- # XXX: we should only send the filelogs (and treemanifest). user
- # already has the changelog and manifest
packer = changegroup.getbundler(
cgversion,
repo,
@@ -2584,5 +2589,7 @@
part.addparam(b'treemanifest', b'1')
if b'exp-sidedata-flag' in repo.requirements:
part.addparam(b'exp-sidedata', b'1')
+ wanted = format_remote_wanted_sidedata(repo)
+ part.addparam(b'exp-wanted-sidedata', wanted)
return bundler
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/bundlecaches.py
--- a/mercurial/bundlecaches.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/bundlecaches.py Tue Apr 20 11:01:06 2021 -0400
@@ -9,6 +9,7 @@
from . import (
error,
+ requirements as requirementsmod,
sslutil,
util,
)
@@ -164,7 +165,7 @@
compression = spec
version = b'v1'
# Generaldelta repos require v2.
- if b'generaldelta' in repo.requirements:
+ if requirementsmod.GENERALDELTA_REQUIREMENT in repo.requirements:
version = b'v2'
# Modern compression engines require v2.
if compression not in _bundlespecv1compengines:
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/bundlerepo.py
--- a/mercurial/bundlerepo.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/bundlerepo.py Tue Apr 20 11:01:06 2021 -0400
@@ -43,6 +43,9 @@
util,
vfs as vfsmod,
)
+from .utils import (
+ urlutil,
+)
class bundlerevlog(revlog.revlog):
@@ -61,7 +64,7 @@
self.repotiprev = n - 1
self.bundlerevs = set() # used by 'bundle()' revset expression
for deltadata in cgunpacker.deltaiter():
- node, p1, p2, cs, deltabase, delta, flags = deltadata
+ node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
size = len(delta)
start = cgunpacker.tell() - size
@@ -175,9 +178,15 @@
class bundlemanifest(bundlerevlog, manifest.manifestrevlog):
def __init__(
- self, opener, cgunpacker, linkmapper, dirlogstarts=None, dir=b''
+ self,
+ nodeconstants,
+ opener,
+ cgunpacker,
+ linkmapper,
+ dirlogstarts=None,
+ dir=b'',
):
- manifest.manifestrevlog.__init__(self, opener, tree=dir)
+ manifest.manifestrevlog.__init__(self, nodeconstants, opener, tree=dir)
bundlerevlog.__init__(
self, opener, self.indexfile, cgunpacker, linkmapper
)
@@ -192,6 +201,7 @@
if d in self._dirlogstarts:
self.bundle.seek(self._dirlogstarts[d])
return bundlemanifest(
+ self.nodeconstants,
self.opener,
self.bundle,
self._linkmapper,
@@ -368,7 +378,9 @@
# consume the header if it exists
self._cgunpacker.manifestheader()
linkmapper = self.unfiltered().changelog.rev
- rootstore = bundlemanifest(self.svfs, self._cgunpacker, linkmapper)
+ rootstore = bundlemanifest(
+ self.nodeconstants, self.svfs, self._cgunpacker, linkmapper
+ )
self.filestart = self._cgunpacker.tell()
return manifest.manifestlog(
@@ -466,7 +478,7 @@
cwd = pathutil.normasprefix(cwd)
if parentpath.startswith(cwd):
parentpath = parentpath[len(cwd) :]
- u = util.url(path)
+ u = urlutil.url(path)
path = u.localpath()
if u.scheme == b'bundle':
s = path.split(b"+", 1)
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/cacheutil.py
--- a/mercurial/cacheutil.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/cacheutil.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# scmutil.py - Mercurial core utility functions
#
-# Copyright Matt Mackall and other
+# Copyright Olivia Mackall and other
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/cext/bdiff.c
--- a/mercurial/cext/bdiff.c Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/cext/bdiff.c Tue Apr 20 11:01:06 2021 -0400
@@ -1,7 +1,7 @@
/*
bdiff.c - efficient binary diff extension for Mercurial
- Copyright 2005, 2006 Matt Mackall
+ Copyright 2005, 2006 Olivia Mackall
This software may be used and distributed according to the terms of
the GNU General Public License, incorporated herein by reference.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/cext/charencode.c
--- a/mercurial/cext/charencode.c Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/cext/charencode.c Tue Apr 20 11:01:06 2021 -0400
@@ -1,7 +1,7 @@
/*
charencode.c - miscellaneous character encoding
- Copyright 2008 Matt Mackall and others
+ Copyright 2008 Olivia Mackall and others
This software may be used and distributed according to the terms of
the GNU General Public License, incorporated herein by reference.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/cext/mpatch.c
--- a/mercurial/cext/mpatch.c Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/cext/mpatch.c Tue Apr 20 11:01:06 2021 -0400
@@ -14,7 +14,7 @@
allocation of intermediate Python objects. Working memory is about 2x
the total number of hunks.
- Copyright 2005, 2006 Matt Mackall
+ Copyright 2005, 2006 Olivia Mackall
This software may be used and distributed according to the terms
of the GNU General Public License, incorporated herein by reference.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/cext/osutil.c
--- a/mercurial/cext/osutil.c Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/cext/osutil.c Tue Apr 20 11:01:06 2021 -0400
@@ -1,7 +1,7 @@
/*
osutil.c - native operating system services
- Copyright 2007 Matt Mackall and others
+ Copyright 2007 Olivia Mackall and others
This software may be used and distributed according to the terms of
the GNU General Public License, incorporated herein by reference.
@@ -119,7 +119,7 @@
static void listdir_stat_dealloc(PyObject *o)
{
- o->ob_type->tp_free(o);
+ Py_TYPE(o)->tp_free(o);
}
static PyObject *listdir_stat_getitem(PyObject *self, PyObject *key)
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/cext/parsers.c
--- a/mercurial/cext/parsers.c Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/cext/parsers.c Tue Apr 20 11:01:06 2021 -0400
@@ -1,7 +1,7 @@
/*
parsers.c - efficient content parsing
- Copyright 2008 Matt Mackall and others
+ Copyright 2008 Olivia Mackall and others
This software may be used and distributed according to the terms of
the GNU General Public License, incorporated herein by reference.
@@ -638,7 +638,7 @@
PyObject *encodedir(PyObject *self, PyObject *args);
PyObject *pathencode(PyObject *self, PyObject *args);
PyObject *lowerencode(PyObject *self, PyObject *args);
-PyObject *parse_index2(PyObject *self, PyObject *args);
+PyObject *parse_index2(PyObject *self, PyObject *args, PyObject *kwargs);
static PyMethodDef methods[] = {
{"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"},
@@ -646,7 +646,8 @@
"create a set containing non-normal and other parent entries of given "
"dirstate\n"},
{"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
- {"parse_index2", parse_index2, METH_VARARGS, "parse a revlog index\n"},
+ {"parse_index2", (PyCFunction)parse_index2, METH_VARARGS | METH_KEYWORDS,
+ "parse a revlog index\n"},
{"isasciistr", isasciistr, METH_VARARGS, "check if an ASCII string\n"},
{"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"},
{"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"},
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/cext/pathencode.c
--- a/mercurial/cext/pathencode.c Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/cext/pathencode.c Tue Apr 20 11:01:06 2021 -0400
@@ -21,6 +21,7 @@
#include
#include
#include
+#include "pythoncapi_compat.h"
#include "util.h"
@@ -678,7 +679,7 @@
}
assert(PyBytes_Check(ret));
- Py_SIZE(ret) = destlen;
+ Py_SET_SIZE(ret, destlen);
return ret;
}
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/cext/revlog.c
--- a/mercurial/cext/revlog.c Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/cext/revlog.c Tue Apr 20 11:01:06 2021 -0400
@@ -1,7 +1,7 @@
/*
parsers.c - efficient content parsing
- Copyright 2008 Matt Mackall and others
+ Copyright 2008 Olivia Mackall and others
This software may be used and distributed according to the terms of
the GNU General Public License, incorporated herein by reference.
@@ -15,6 +15,7 @@
#include
#include
#include
+#include
#include "bitmanipulation.h"
#include "charencode.h"
@@ -98,6 +99,7 @@
int ntlookups; /* # lookups */
int ntmisses; /* # lookups that miss the cache */
int inlined;
+ long hdrsize; /* size of index headers. Differs in v1 v.s. v2 format */
};
static Py_ssize_t index_length(const indexObject *self)
@@ -113,14 +115,19 @@
static int index_find_node(indexObject *self, const char *node);
#if LONG_MAX == 0x7fffffffL
-static const char *const tuple_format = PY23("Kiiiiiis#", "Kiiiiiiy#");
+static const char *const v1_tuple_format = PY23("Kiiiiiis#", "Kiiiiiiy#");
+static const char *const v2_tuple_format = PY23("Kiiiiiis#Ki", "Kiiiiiiy#Ki");
#else
-static const char *const tuple_format = PY23("kiiiiiis#", "kiiiiiiy#");
+static const char *const v1_tuple_format = PY23("kiiiiiis#", "kiiiiiiy#");
+static const char *const v2_tuple_format = PY23("kiiiiiis#ki", "kiiiiiiy#ki");
#endif
/* A RevlogNG v1 index entry is 64 bytes long. */
static const long v1_hdrsize = 64;
+/* A Revlogv2 index entry is 96 bytes long. */
+static const long v2_hdrsize = 96;
+
static void raise_revlog_error(void)
{
PyObject *mod = NULL, *dict = NULL, *errclass = NULL;
@@ -157,7 +164,7 @@
static const char *index_deref(indexObject *self, Py_ssize_t pos)
{
if (pos >= self->length)
- return self->added + (pos - self->length) * v1_hdrsize;
+ return self->added + (pos - self->length) * self->hdrsize;
if (self->inlined && pos > 0) {
if (self->offsets == NULL) {
@@ -174,7 +181,7 @@
return self->offsets[pos];
}
- return (const char *)(self->buf.buf) + pos * v1_hdrsize;
+ return (const char *)(self->buf.buf) + pos * self->hdrsize;
}
/*
@@ -280,8 +287,9 @@
*/
static PyObject *index_get(indexObject *self, Py_ssize_t pos)
{
- uint64_t offset_flags;
- int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2;
+ uint64_t offset_flags, sidedata_offset;
+ int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2,
+ sidedata_comp_len;
const char *c_node_id;
const char *data;
Py_ssize_t length = index_length(self);
@@ -320,9 +328,19 @@
parent_2 = getbe32(data + 28);
c_node_id = data + 32;
- return Py_BuildValue(tuple_format, offset_flags, comp_len, uncomp_len,
- base_rev, link_rev, parent_1, parent_2, c_node_id,
- self->nodelen);
+ if (self->hdrsize == v1_hdrsize) {
+ return Py_BuildValue(v1_tuple_format, offset_flags, comp_len,
+ uncomp_len, base_rev, link_rev, parent_1,
+ parent_2, c_node_id, self->nodelen);
+ } else {
+ sidedata_offset = getbe64(data + 64);
+ sidedata_comp_len = getbe32(data + 72);
+
+ return Py_BuildValue(v2_tuple_format, offset_flags, comp_len,
+ uncomp_len, base_rev, link_rev, parent_1,
+ parent_2, c_node_id, self->nodelen,
+ sidedata_offset, sidedata_comp_len);
+ }
}
/*
@@ -373,18 +391,31 @@
static PyObject *index_append(indexObject *self, PyObject *obj)
{
- uint64_t offset_flags;
+ uint64_t offset_flags, sidedata_offset;
int rev, comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2;
- Py_ssize_t c_node_id_len;
+ Py_ssize_t c_node_id_len, sidedata_comp_len;
const char *c_node_id;
char *data;
- if (!PyArg_ParseTuple(obj, tuple_format, &offset_flags, &comp_len,
- &uncomp_len, &base_rev, &link_rev, &parent_1,
- &parent_2, &c_node_id, &c_node_id_len)) {
- PyErr_SetString(PyExc_TypeError, "8-tuple required");
- return NULL;
+ if (self->hdrsize == v1_hdrsize) {
+ if (!PyArg_ParseTuple(obj, v1_tuple_format, &offset_flags,
+ &comp_len, &uncomp_len, &base_rev,
+ &link_rev, &parent_1, &parent_2,
+ &c_node_id, &c_node_id_len)) {
+ PyErr_SetString(PyExc_TypeError, "8-tuple required");
+ return NULL;
+ }
+ } else {
+ if (!PyArg_ParseTuple(obj, v2_tuple_format, &offset_flags,
+ &comp_len, &uncomp_len, &base_rev,
+ &link_rev, &parent_1, &parent_2,
+ &c_node_id, &c_node_id_len,
+ &sidedata_offset, &sidedata_comp_len)) {
+ PyErr_SetString(PyExc_TypeError, "10-tuple required");
+ return NULL;
+ }
}
+
if (c_node_id_len != self->nodelen) {
PyErr_SetString(PyExc_TypeError, "invalid node");
return NULL;
@@ -393,15 +424,15 @@
if (self->new_length == self->added_length) {
size_t new_added_length =
self->added_length ? self->added_length * 2 : 4096;
- void *new_added =
- PyMem_Realloc(self->added, new_added_length * v1_hdrsize);
+ void *new_added = PyMem_Realloc(self->added, new_added_length *
+ self->hdrsize);
if (!new_added)
return PyErr_NoMemory();
self->added = new_added;
self->added_length = new_added_length;
}
rev = self->length + self->new_length;
- data = self->added + v1_hdrsize * self->new_length++;
+ data = self->added + self->hdrsize * self->new_length++;
putbe32(offset_flags >> 32, data);
putbe32(offset_flags & 0xffffffffU, data + 4);
putbe32(comp_len, data + 8);
@@ -411,7 +442,14 @@
putbe32(parent_1, data + 24);
putbe32(parent_2, data + 28);
memcpy(data + 32, c_node_id, c_node_id_len);
+ /* Padding since SHA-1 is only 20 bytes for now */
memset(data + 32 + c_node_id_len, 0, 32 - c_node_id_len);
+ if (self->hdrsize != v1_hdrsize) {
+ putbe64(sidedata_offset, data + 64);
+ putbe32(sidedata_comp_len, data + 72);
+ /* Padding for 96 bytes alignment */
+ memset(data + 76, 0, self->hdrsize - 76);
+ }
if (self->ntinitialized)
nt_insert(&self->nt, c_node_id, rev);
@@ -420,6 +458,56 @@
Py_RETURN_NONE;
}
+/* Replace an existing index entry's sidedata offset and length with new ones.
+ This cannot be used outside of the context of sidedata rewriting,
+ inside the transaction that creates the given revision. */
+static PyObject *index_replace_sidedata_info(indexObject *self, PyObject *args)
+{
+ uint64_t sidedata_offset;
+ int rev;
+ Py_ssize_t sidedata_comp_len;
+ char *data;
+#if LONG_MAX == 0x7fffffffL
+ const char *const sidedata_format = PY23("nKi", "nKi");
+#else
+ const char *const sidedata_format = PY23("nki", "nki");
+#endif
+
+ if (self->hdrsize == v1_hdrsize || self->inlined) {
+ /*
+ There is a bug in the transaction handling when going from an
+ inline revlog to a separate index and data file. Turn it off until
+ it's fixed, since v2 revlogs sometimes get rewritten on exchange.
+ See issue6485.
+ */
+ raise_revlog_error();
+ return NULL;
+ }
+
+ if (!PyArg_ParseTuple(args, sidedata_format, &rev, &sidedata_offset,
+ &sidedata_comp_len))
+ return NULL;
+
+ if (rev < 0 || rev >= index_length(self)) {
+ PyErr_SetString(PyExc_IndexError, "revision outside index");
+ return NULL;
+ }
+ if (rev < self->length) {
+ PyErr_SetString(
+ PyExc_IndexError,
+ "cannot rewrite entries outside of this transaction");
+ return NULL;
+ }
+
+ /* Find the newly added node, offset from the "already on-disk" length
+ */
+ data = self->added + self->hdrsize * (rev - self->length);
+ putbe64(sidedata_offset, data + 64);
+ putbe32(sidedata_comp_len, data + 72);
+
+ Py_RETURN_NONE;
+}
+
static PyObject *index_stats(indexObject *self)
{
PyObject *obj = PyDict_New();
@@ -2563,14 +2651,17 @@
const char *data = (const char *)self->buf.buf;
Py_ssize_t pos = 0;
Py_ssize_t end = self->buf.len;
- long incr = v1_hdrsize;
+ long incr = self->hdrsize;
Py_ssize_t len = 0;
- while (pos + v1_hdrsize <= end && pos >= 0) {
- uint32_t comp_len;
+ while (pos + self->hdrsize <= end && pos >= 0) {
+ uint32_t comp_len, sidedata_comp_len = 0;
/* 3rd element of header is length of compressed inline data */
comp_len = getbe32(data + pos + 8);
- incr = v1_hdrsize + comp_len;
+ if (self->hdrsize == v2_hdrsize) {
+ sidedata_comp_len = getbe32(data + pos + 72);
+ }
+ incr = self->hdrsize + comp_len + sidedata_comp_len;
if (offsets)
offsets[len] = data + pos;
len++;
@@ -2586,11 +2677,13 @@
return len;
}
-static int index_init(indexObject *self, PyObject *args)
+static int index_init(indexObject *self, PyObject *args, PyObject *kwargs)
{
- PyObject *data_obj, *inlined_obj;
+ PyObject *data_obj, *inlined_obj, *revlogv2;
Py_ssize_t size;
+ static char *kwlist[] = {"data", "inlined", "revlogv2", NULL};
+
/* Initialize before argument-checking to avoid index_dealloc() crash.
*/
self->added = NULL;
@@ -2606,7 +2699,9 @@
self->nodelen = 20;
self->nullentry = NULL;
- if (!PyArg_ParseTuple(args, "OO", &data_obj, &inlined_obj))
+ revlogv2 = NULL;
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, "OO|O", kwlist,
+ &data_obj, &inlined_obj, &revlogv2))
return -1;
if (!PyObject_CheckBuffer(data_obj)) {
PyErr_SetString(PyExc_TypeError,
@@ -2618,8 +2713,22 @@
return -1;
}
- self->nullentry = Py_BuildValue(PY23("iiiiiiis#", "iiiiiiiy#"), 0, 0, 0,
- -1, -1, -1, -1, nullid, self->nodelen);
+ if (revlogv2 && PyObject_IsTrue(revlogv2)) {
+ self->hdrsize = v2_hdrsize;
+ } else {
+ self->hdrsize = v1_hdrsize;
+ }
+
+ if (self->hdrsize == v1_hdrsize) {
+ self->nullentry =
+ Py_BuildValue(PY23("iiiiiiis#", "iiiiiiiy#"), 0, 0, 0, -1,
+ -1, -1, -1, nullid, self->nodelen);
+ } else {
+ self->nullentry =
+ Py_BuildValue(PY23("iiiiiiis#ii", "iiiiiiiy#ii"), 0, 0, 0,
+ -1, -1, -1, -1, nullid, self->nodelen, 0, 0);
+ }
+
if (!self->nullentry)
return -1;
PyObject_GC_UnTrack(self->nullentry);
@@ -2641,11 +2750,11 @@
goto bail;
self->length = len;
} else {
- if (size % v1_hdrsize) {
+ if (size % self->hdrsize) {
PyErr_SetString(PyExc_ValueError, "corrupt index file");
goto bail;
}
- self->length = size / v1_hdrsize;
+ self->length = size / self->hdrsize;
}
return 0;
@@ -2730,6 +2839,8 @@
"compute phases"},
{"reachableroots2", (PyCFunction)reachableroots2, METH_VARARGS,
"reachableroots"},
+ {"replace_sidedata_info", (PyCFunction)index_replace_sidedata_info,
+ METH_VARARGS, "replace an existing index entry with a new value"},
{"headrevs", (PyCFunction)index_headrevs, METH_VARARGS,
"get head revisions"}, /* Can do filtering since 3.2 */
{"headrevsfiltered", (PyCFunction)index_headrevs, METH_VARARGS,
@@ -2756,6 +2867,12 @@
{NULL} /* Sentinel */
};
+static PyMemberDef index_members[] = {
+ {"entry_size", T_LONG, offsetof(indexObject, hdrsize), 0,
+ "size of an index entry"},
+ {NULL} /* Sentinel */
+};
+
PyTypeObject HgRevlogIndex_Type = {
PyVarObject_HEAD_INIT(NULL, 0) /* header */
"parsers.index", /* tp_name */
@@ -2785,7 +2902,7 @@
0, /* tp_iter */
0, /* tp_iternext */
index_methods, /* tp_methods */
- 0, /* tp_members */
+ index_members, /* tp_members */
index_getset, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
@@ -2797,16 +2914,16 @@
};
/*
- * returns a tuple of the form (index, index, cache) with elements as
+ * returns a tuple of the form (index, cache) with elements as
* follows:
*
- * index: an index object that lazily parses RevlogNG records
+ * index: an index object that lazily parses Revlog (v1 or v2) records
* cache: if data is inlined, a tuple (0, index_file_content), else None
* index_file_content could be a string, or a buffer
*
* added complications are for backwards compatibility
*/
-PyObject *parse_index2(PyObject *self, PyObject *args)
+PyObject *parse_index2(PyObject *self, PyObject *args, PyObject *kwargs)
{
PyObject *cache = NULL;
indexObject *idx;
@@ -2816,7 +2933,7 @@
if (idx == NULL)
goto bail;
- ret = index_init(idx, args);
+ ret = index_init(idx, args, kwargs);
if (ret == -1)
goto bail;
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/changegroup.py
--- a/mercurial/changegroup.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/changegroup.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,12 +1,13 @@
# changegroup.py - Mercurial changegroup manipulation functions
#
-# Copyright 2006 Matt Mackall
+# Copyright 2006 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
+import collections
import os
import struct
import weakref
@@ -32,6 +33,7 @@
)
from .interfaces import repository
+from .revlogutils import sidedata as sidedatamod
_CHANGEGROUPV1_DELTA_HEADER = struct.Struct(b"20s20s20s20s")
_CHANGEGROUPV2_DELTA_HEADER = struct.Struct(b"20s20s20s20s20s")
@@ -202,7 +204,9 @@
header = self.deltaheader.unpack(headerdata)
delta = readexactly(self._stream, l - self.deltaheadersize)
node, p1, p2, deltabase, cs, flags = self._deltaheader(header, prevnode)
- return (node, p1, p2, cs, deltabase, delta, flags)
+ # cg4 forward-compat
+ sidedata = {}
+ return (node, p1, p2, cs, deltabase, delta, flags, sidedata)
def getchunks(self):
"""returns all the chunks contains in the bundle
@@ -249,7 +253,7 @@
pos = next
yield closechunk()
- def _unpackmanifests(self, repo, revmap, trp, prog):
+ def _unpackmanifests(self, repo, revmap, trp, prog, addrevisioncb=None):
self.callback = prog.increment
# no need to check for empty manifest group here:
# if the result of the merge of 1 and 2 is the same in 3 and 4,
@@ -257,7 +261,8 @@
# be empty during the pull
self.manifestheader()
deltas = self.deltaiter()
- repo.manifestlog.getstorage(b'').addgroup(deltas, revmap, trp)
+ storage = repo.manifestlog.getstorage(b'')
+ storage.addgroup(deltas, revmap, trp, addrevisioncb=addrevisioncb)
prog.complete()
self.callback = None
@@ -269,6 +274,7 @@
url,
targetphase=phases.draft,
expectedtotal=None,
+ sidedata_categories=None,
):
"""Add the changegroup returned by source.read() to this repo.
srctype is a string like 'push', 'pull', or 'unbundle'. url is
@@ -279,9 +285,23 @@
- more heads than before: 1+added heads (2..n)
- fewer heads than before: -1-removed heads (-2..-n)
- number of heads stays the same: 1
+
+ `sidedata_categories` is an optional set of the remote's sidedata wanted
+ categories.
"""
repo = repo.unfiltered()
+ # Only useful if we're adding sidedata categories. If both peers have
+ # the same categories, then we simply don't do anything.
+ if self.version == b'04' and srctype == b'pull':
+ sidedata_helpers = get_sidedata_helpers(
+ repo,
+ sidedata_categories or set(),
+ pull=True,
+ )
+ else:
+ sidedata_helpers = None
+
def csmap(x):
repo.ui.debug(b"add changeset %s\n" % short(x))
return len(cl)
@@ -316,14 +336,16 @@
self.callback = progress.increment
efilesset = set()
- cgnodes = []
+ duprevs = []
- def ondupchangelog(cl, node):
- if cl.rev(node) < clstart:
- cgnodes.append(node)
+ def ondupchangelog(cl, rev):
+ if rev < clstart:
+ duprevs.append(rev)
- def onchangelog(cl, node):
- efilesset.update(cl.readfiles(node))
+ def onchangelog(cl, rev):
+ ctx = cl.changelogrevision(rev)
+ efilesset.update(ctx.files)
+ repo.register_changeset(rev, ctx)
self.changelogheader()
deltas = self.deltaiter()
@@ -331,6 +353,7 @@
deltas,
csmap,
trp,
+ alwayscache=True,
addrevisioncb=onchangelog,
duplicaterevisioncb=ondupchangelog,
):
@@ -348,6 +371,13 @@
efilesset = None
self.callback = None
+ # Keep track of the (non-changelog) revlogs we've updated and their
+ # range of new revisions for sidedata rewrite.
+ # TODO do something more efficient than keeping the reference to
+ # the revlogs, especially memory-wise.
+ touched_manifests = {}
+ touched_filelogs = {}
+
# pull off the manifest group
repo.ui.status(_(b"adding manifests\n"))
# We know that we'll never have more manifests than we had
@@ -355,7 +385,24 @@
progress = repo.ui.makeprogress(
_(b'manifests'), unit=_(b'chunks'), total=changesets
)
- self._unpackmanifests(repo, revmap, trp, progress)
+ on_manifest_rev = None
+ if sidedata_helpers and b'manifest' in sidedata_helpers[1]:
+
+ def on_manifest_rev(manifest, rev):
+ range = touched_manifests.get(manifest)
+ if not range:
+ touched_manifests[manifest] = (rev, rev)
+ else:
+ assert rev == range[1] + 1
+ touched_manifests[manifest] = (range[0], rev)
+
+ self._unpackmanifests(
+ repo,
+ revmap,
+ trp,
+ progress,
+ addrevisioncb=on_manifest_rev,
+ )
needfiles = {}
if repo.ui.configbool(b'server', b'validate'):
@@ -369,12 +416,37 @@
for f, n in pycompat.iteritems(mfest):
needfiles.setdefault(f, set()).add(n)
+ on_filelog_rev = None
+ if sidedata_helpers and b'filelog' in sidedata_helpers[1]:
+
+ def on_filelog_rev(filelog, rev):
+ range = touched_filelogs.get(filelog)
+ if not range:
+ touched_filelogs[filelog] = (rev, rev)
+ else:
+ assert rev == range[1] + 1
+ touched_filelogs[filelog] = (range[0], rev)
+
# process the files
repo.ui.status(_(b"adding file changes\n"))
newrevs, newfiles = _addchangegroupfiles(
- repo, self, revmap, trp, efiles, needfiles
+ repo,
+ self,
+ revmap,
+ trp,
+ efiles,
+ needfiles,
+ addrevisioncb=on_filelog_rev,
)
+ if sidedata_helpers:
+ if b'changelog' in sidedata_helpers[1]:
+ cl.rewrite_sidedata(sidedata_helpers, clstart, clend - 1)
+ for mf, (startrev, endrev) in touched_manifests.items():
+ mf.rewrite_sidedata(sidedata_helpers, startrev, endrev)
+ for fl, (startrev, endrev) in touched_filelogs.items():
+ fl.rewrite_sidedata(sidedata_helpers, startrev, endrev)
+
# making sure the value exists
tr.changes.setdefault(b'changegroup-count-changesets', 0)
tr.changes.setdefault(b'changegroup-count-revisions', 0)
@@ -445,8 +517,12 @@
if added:
phases.registernew(repo, tr, targetphase, added)
if phaseall is not None:
- phases.advanceboundary(repo, tr, phaseall, cgnodes, revs=added)
- cgnodes = []
+ if duprevs:
+ duprevs.extend(added)
+ else:
+ duprevs = added
+ phases.advanceboundary(repo, tr, phaseall, [], revs=duprevs)
+ duprevs = []
if changesets > 0:
@@ -494,7 +570,7 @@
"""
chain = None
for chunkdata in iter(lambda: self.deltachunk(chain), {}):
- # Chunkdata: (node, p1, p2, cs, deltabase, delta, flags)
+ # Chunkdata: (node, p1, p2, cs, deltabase, delta, flags, sidedata)
yield chunkdata
chain = chunkdata[0]
@@ -534,17 +610,44 @@
node, p1, p2, deltabase, cs, flags = headertuple
return node, p1, p2, deltabase, cs, flags
- def _unpackmanifests(self, repo, revmap, trp, prog):
- super(cg3unpacker, self)._unpackmanifests(repo, revmap, trp, prog)
+ def _unpackmanifests(self, repo, revmap, trp, prog, addrevisioncb=None):
+ super(cg3unpacker, self)._unpackmanifests(
+ repo, revmap, trp, prog, addrevisioncb=addrevisioncb
+ )
for chunkdata in iter(self.filelogheader, {}):
# If we get here, there are directory manifests in the changegroup
d = chunkdata[b"filename"]
repo.ui.debug(b"adding %s revisions\n" % d)
deltas = self.deltaiter()
- if not repo.manifestlog.getstorage(d).addgroup(deltas, revmap, trp):
+ if not repo.manifestlog.getstorage(d).addgroup(
+ deltas, revmap, trp, addrevisioncb=addrevisioncb
+ ):
raise error.Abort(_(b"received dir revlog group is empty"))
+class cg4unpacker(cg3unpacker):
+ """Unpacker for cg4 streams.
+
+ cg4 streams add support for exchanging sidedata.
+ """
+
+ version = b'04'
+
+ def deltachunk(self, prevnode):
+ res = super(cg4unpacker, self).deltachunk(prevnode)
+ if not res:
+ return res
+
+ (node, p1, p2, cs, deltabase, delta, flags, _sidedata) = res
+
+ sidedata_raw = getchunk(self._stream)
+ sidedata = {}
+ if len(sidedata_raw) > 0:
+ sidedata = sidedatamod.deserialize_sidedata(sidedata_raw)
+
+ return node, p1, p2, cs, deltabase, delta, flags, sidedata
+
+
class headerlessfixup(object):
def __init__(self, fh, h):
self._h = h
@@ -559,7 +662,7 @@
return readexactly(self._fh, n)
-def _revisiondeltatochunks(delta, headerfn):
+def _revisiondeltatochunks(repo, delta, headerfn):
"""Serialize a revisiondelta to changegroup chunks."""
# The captured revision delta may be encoded as a delta against
@@ -585,6 +688,13 @@
yield prefix
yield data
+ sidedata = delta.sidedata
+ if sidedata is not None:
+ # Need a separate chunk for sidedata to be able to differentiate
+ # "raw delta" length and sidedata length
+ yield chunkheader(len(sidedata))
+ yield sidedata
+
def _sortnodesellipsis(store, nodes, cl, lookup):
"""Sort nodes for changegroup generation."""
@@ -678,7 +788,7 @@
# We failed to resolve a parent for this node, so
# we crash the changegroup construction.
raise error.Abort(
- b'unable to resolve parent while packing %r %r'
+ b"unable to resolve parent while packing '%s' %r"
b' for changeset %r' % (store.indexfile, rev, clrev)
)
@@ -709,6 +819,7 @@
clrevtolocalrev=None,
fullclnodes=None,
precomputedellipsis=None,
+ sidedata_helpers=None,
):
"""Calculate deltas for a set of revisions.
@@ -716,6 +827,8 @@
If topic is not None, progress detail will be generated using this
topic name (e.g. changesets, manifests, etc).
+
+ See `storageutil.emitrevisions` for the doc on `sidedata_helpers`.
"""
if not nodes:
return
@@ -814,6 +927,7 @@
revisiondata=True,
assumehaveparentrevisions=not ellipses,
deltamode=deltamode,
+ sidedata_helpers=sidedata_helpers,
)
for i, revision in enumerate(revisions):
@@ -854,6 +968,7 @@
shallow=False,
ellipsisroots=None,
fullnodes=None,
+ remote_sidedata=None,
):
"""Given a source repo, construct a bundler.
@@ -886,6 +1001,8 @@
nodes. We store this rather than the set of nodes that should be
ellipsis because for very large histories we expect this to be
significantly smaller.
+
+ remote_sidedata is the set of sidedata categories wanted by the remote.
"""
assert oldmatcher
assert matcher
@@ -902,6 +1019,9 @@
if bundlecaps is None:
bundlecaps = set()
self._bundlecaps = bundlecaps
+ if remote_sidedata is None:
+ remote_sidedata = set()
+ self._remote_sidedata = remote_sidedata
self._isshallow = shallow
self._fullclnodes = fullnodes
@@ -928,11 +1048,26 @@
self._verbosenote(_(b'uncompressed size of bundle content:\n'))
size = 0
+ sidedata_helpers = None
+ if self.version == b'04':
+ remote_sidedata = self._remote_sidedata
+ if source == b'strip':
+ # We're our own remote when stripping, get the no-op helpers
+ # TODO a better approach would be for the strip bundle to
+ # correctly advertise its sidedata categories directly.
+ remote_sidedata = repo._wanted_sidedata
+ sidedata_helpers = get_sidedata_helpers(repo, remote_sidedata)
+
clstate, deltas = self._generatechangelog(
- cl, clnodes, generate=changelog
+ cl,
+ clnodes,
+ generate=changelog,
+ sidedata_helpers=sidedata_helpers,
)
for delta in deltas:
- for chunk in _revisiondeltatochunks(delta, self._builddeltaheader):
+ for chunk in _revisiondeltatochunks(
+ self._repo, delta, self._builddeltaheader
+ ):
size += len(chunk)
yield chunk
@@ -977,17 +1112,20 @@
fnodes,
source,
clstate[b'clrevtomanifestrev'],
+ sidedata_helpers=sidedata_helpers,
)
for tree, deltas in it:
if tree:
- assert self.version == b'03'
+ assert self.version in (b'03', b'04')
chunk = _fileheader(tree)
size += len(chunk)
yield chunk
for delta in deltas:
- chunks = _revisiondeltatochunks(delta, self._builddeltaheader)
+ chunks = _revisiondeltatochunks(
+ self._repo, delta, self._builddeltaheader
+ )
for chunk in chunks:
size += len(chunk)
yield chunk
@@ -1002,7 +1140,7 @@
mfdicts = None
if self._ellipses and self._isshallow:
mfdicts = [
- (self._repo.manifestlog[n].read(), lr)
+ (repo.manifestlog[n].read(), lr)
for (n, lr) in pycompat.iteritems(manifests)
]
@@ -1017,6 +1155,7 @@
fastpathlinkrev,
fnodes,
clrevs,
+ sidedata_helpers=sidedata_helpers,
)
for path, deltas in it:
@@ -1025,7 +1164,9 @@
yield h
for delta in deltas:
- chunks = _revisiondeltatochunks(delta, self._builddeltaheader)
+ chunks = _revisiondeltatochunks(
+ self._repo, delta, self._builddeltaheader
+ )
for chunk in chunks:
size += len(chunk)
yield chunk
@@ -1041,7 +1182,9 @@
if clnodes:
repo.hook(b'outgoing', node=hex(clnodes[0]), source=source)
- def _generatechangelog(self, cl, nodes, generate=True):
+ def _generatechangelog(
+ self, cl, nodes, generate=True, sidedata_helpers=None
+ ):
"""Generate data for changelog chunks.
Returns a 2-tuple of a dict containing state and an iterable of
@@ -1050,6 +1193,8 @@
if generate is False, the state will be fully populated and no chunk
stream will be yielded
+
+ See `storageutil.emitrevisions` for the doc on `sidedata_helpers`.
"""
clrevorder = {}
manifests = {}
@@ -1133,6 +1278,7 @@
clrevtolocalrev={},
fullclnodes=self._fullclnodes,
precomputedellipsis=self._precomputedellipsis,
+ sidedata_helpers=sidedata_helpers,
)
return state, gen
@@ -1146,11 +1292,14 @@
fnodes,
source,
clrevtolocalrev,
+ sidedata_helpers=None,
):
"""Returns an iterator of changegroup chunks containing manifests.
`source` is unused here, but is used by extensions like remotefilelog to
change what is sent based in pulls vs pushes, etc.
+
+ See `storageutil.emitrevisions` for the doc on `sidedata_helpers`.
"""
repo = self._repo
mfl = repo.manifestlog
@@ -1240,6 +1389,7 @@
clrevtolocalrev=clrevtolocalrev,
fullclnodes=self._fullclnodes,
precomputedellipsis=self._precomputedellipsis,
+ sidedata_helpers=sidedata_helpers,
)
if not self._oldmatcher.visitdir(store.tree[:-1]):
@@ -1278,6 +1428,7 @@
fastpathlinkrev,
fnodes,
clrevs,
+ sidedata_helpers=None,
):
changedfiles = [
f
@@ -1372,6 +1523,7 @@
clrevtolocalrev=clrevtolocalrev,
fullclnodes=self._fullclnodes,
precomputedellipsis=self._precomputedellipsis,
+ sidedata_helpers=sidedata_helpers,
)
yield fname, deltas
@@ -1388,6 +1540,7 @@
shallow=False,
ellipsisroots=None,
fullnodes=None,
+ remote_sidedata=None,
):
builddeltaheader = lambda d: _CHANGEGROUPV1_DELTA_HEADER.pack(
d.node, d.p1node, d.p2node, d.linknode
@@ -1418,6 +1571,7 @@
shallow=False,
ellipsisroots=None,
fullnodes=None,
+ remote_sidedata=None,
):
builddeltaheader = lambda d: _CHANGEGROUPV2_DELTA_HEADER.pack(
d.node, d.p1node, d.p2node, d.basenode, d.linknode
@@ -1447,6 +1601,7 @@
shallow=False,
ellipsisroots=None,
fullnodes=None,
+ remote_sidedata=None,
):
builddeltaheader = lambda d: _CHANGEGROUPV3_DELTA_HEADER.pack(
d.node, d.p1node, d.p2node, d.basenode, d.linknode, d.flags
@@ -1467,12 +1622,47 @@
)
+def _makecg4packer(
+ repo,
+ oldmatcher,
+ matcher,
+ bundlecaps,
+ ellipses=False,
+ shallow=False,
+ ellipsisroots=None,
+ fullnodes=None,
+ remote_sidedata=None,
+):
+ # Same header func as cg3. Sidedata is in a separate chunk from the delta to
+ # differenciate "raw delta" and sidedata.
+ builddeltaheader = lambda d: _CHANGEGROUPV3_DELTA_HEADER.pack(
+ d.node, d.p1node, d.p2node, d.basenode, d.linknode, d.flags
+ )
+
+ return cgpacker(
+ repo,
+ oldmatcher,
+ matcher,
+ b'04',
+ builddeltaheader=builddeltaheader,
+ manifestsend=closechunk(),
+ bundlecaps=bundlecaps,
+ ellipses=ellipses,
+ shallow=shallow,
+ ellipsisroots=ellipsisroots,
+ fullnodes=fullnodes,
+ remote_sidedata=remote_sidedata,
+ )
+
+
_packermap = {
b'01': (_makecg1packer, cg1unpacker),
# cg2 adds support for exchanging generaldelta
b'02': (_makecg2packer, cg2unpacker),
# cg3 adds support for exchanging revlog flags and treemanifests
b'03': (_makecg3packer, cg3unpacker),
+ # ch4 adds support for exchanging sidedata
+ b'04': (_makecg4packer, cg4unpacker),
}
@@ -1492,11 +1682,9 @@
#
# (or even to push subset of history)
needv03 = True
- if b'exp-sidedata-flag' in repo.requirements:
- needv03 = True
- # don't attempt to use 01/02 until we do sidedata cleaning
- versions.discard(b'01')
- versions.discard(b'02')
+ has_revlogv2 = requirements.REVLOGV2_REQUIREMENT in repo.requirements
+ if not has_revlogv2:
+ versions.discard(b'04')
if not needv03:
versions.discard(b'03')
return versions
@@ -1543,7 +1731,7 @@
# will support. For example, all hg versions that support generaldelta also
# support changegroup 02.
versions = supportedoutgoingversions(repo)
- if b'generaldelta' in repo.requirements:
+ if requirements.GENERALDELTA_REQUIREMENT in repo.requirements:
versions.discard(b'01')
assert versions
return min(versions)
@@ -1559,6 +1747,7 @@
shallow=False,
ellipsisroots=None,
fullnodes=None,
+ remote_sidedata=None,
):
assert version in supportedoutgoingversions(repo)
@@ -1595,6 +1784,7 @@
shallow=shallow,
ellipsisroots=ellipsisroots,
fullnodes=fullnodes,
+ remote_sidedata=remote_sidedata,
)
@@ -1638,8 +1828,15 @@
fastpath=False,
bundlecaps=None,
matcher=None,
+ remote_sidedata=None,
):
- bundler = getbundler(version, repo, bundlecaps=bundlecaps, matcher=matcher)
+ bundler = getbundler(
+ version,
+ repo,
+ bundlecaps=bundlecaps,
+ matcher=matcher,
+ remote_sidedata=remote_sidedata,
+ )
repo = repo.unfiltered()
commonrevs = outgoing.common
@@ -1658,7 +1855,15 @@
return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
-def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles):
+def _addchangegroupfiles(
+ repo,
+ source,
+ revmap,
+ trp,
+ expectedfiles,
+ needfiles,
+ addrevisioncb=None,
+):
revisions = 0
files = 0
progress = repo.ui.makeprogress(
@@ -1673,7 +1878,13 @@
o = len(fl)
try:
deltas = source.deltaiter()
- if not fl.addgroup(deltas, revmap, trp):
+ added = fl.addgroup(
+ deltas,
+ revmap,
+ trp,
+ addrevisioncb=addrevisioncb,
+ )
+ if not added:
raise error.Abort(_(b"received file revlog group is empty"))
except error.CensoredBaseError as e:
raise error.Abort(_(b"received delta base is censored: %s") % e)
@@ -1702,3 +1913,25 @@
)
return revisions, files
+
+
+def get_sidedata_helpers(repo, remote_sd_categories, pull=False):
+ # Computers for computing sidedata on-the-fly
+ sd_computers = collections.defaultdict(list)
+ # Computers for categories to remove from sidedata
+ sd_removers = collections.defaultdict(list)
+
+ to_generate = remote_sd_categories - repo._wanted_sidedata
+ to_remove = repo._wanted_sidedata - remote_sd_categories
+ if pull:
+ to_generate, to_remove = to_remove, to_generate
+
+ for revlog_kind, computers in repo._sidedata_computers.items():
+ for category, computer in computers.items():
+ if category in to_generate:
+ sd_computers[revlog_kind].append(computer)
+ if category in to_remove:
+ sd_removers[revlog_kind].append(computer)
+
+ sidedata_helpers = (repo, sd_computers, sd_removers)
+ return sidedata_helpers
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/changelog.py
--- a/mercurial/changelog.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/changelog.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# changelog.py - changelog class for mercurial
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -191,7 +191,7 @@
# Extensions might modify _defaultextra, so let the constructor below pass
# it in
extra = attr.ib()
- manifest = attr.ib(default=nullid)
+ manifest = attr.ib()
user = attr.ib(default=b'')
date = attr.ib(default=(0, 0))
files = attr.ib(default=attr.Factory(list))
@@ -200,6 +200,7 @@
p1copies = attr.ib(default=None)
p2copies = attr.ib(default=None)
description = attr.ib(default=b'')
+ branchinfo = attr.ib(default=(_defaultextra[b'branch'], False))
class changelogrevision(object):
@@ -218,9 +219,9 @@
'_changes',
)
- def __new__(cls, text, sidedata, cpsd):
+ def __new__(cls, cl, text, sidedata, cpsd):
if not text:
- return _changelogrevision(extra=_defaultextra)
+ return _changelogrevision(extra=_defaultextra, manifest=nullid)
self = super(changelogrevision, cls).__new__(cls)
# We could return here and implement the following as an __init__.
@@ -372,9 +373,14 @@
def description(self):
return encoding.tolocal(self._text[self._offsets[3] + 2 :])
+ @property
+ def branchinfo(self):
+ extra = self.extra
+ return encoding.tolocal(extra.get(b"branch")), b'close' in extra
+
class changelog(revlog.revlog):
- def __init__(self, opener, trypending=False):
+ def __init__(self, opener, trypending=False, concurrencychecker=None):
"""Load a changelog revlog using an opener.
If ``trypending`` is true, we attempt to load the index from a
@@ -383,6 +389,9 @@
revision) data for a transaction that hasn't been finalized yet.
It exists in a separate file to facilitate readers (such as
hooks processes) accessing data before a transaction is finalized.
+
+ ``concurrencychecker`` will be passed to the revlog init function, see
+ the documentation there.
"""
if trypending and opener.exists(b'00changelog.i.a'):
indexfile = b'00changelog.i.a'
@@ -398,6 +407,7 @@
checkambig=True,
mmaplargeindex=True,
persistentnodemap=opener.options.get(b'persistent-nodemap', False),
+ concurrencychecker=concurrencychecker,
)
if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
@@ -418,6 +428,7 @@
self._filteredrevs = frozenset()
self._filteredrevs_hashcache = {}
self._copiesstorage = opener.options.get(b'copies-storage')
+ self.revlog_kind = b'changelog'
@property
def filteredrevs(self):
@@ -497,7 +508,7 @@
if not self._delayed:
revlog.revlog._enforceinlinesize(self, tr, fp)
- def read(self, node):
+ def read(self, nodeorrev):
"""Obtain data from a parsed changelog revision.
Returns a 6-tuple of:
@@ -513,9 +524,9 @@
``changelogrevision`` instead, as it is faster for partial object
access.
"""
- d, s = self._revisiondata(node)
+ d, s = self._revisiondata(nodeorrev)
c = changelogrevision(
- d, s, self._copiesstorage == b'changeset-sidedata'
+ self, d, s, self._copiesstorage == b'changeset-sidedata'
)
return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
@@ -523,14 +534,14 @@
"""Obtain a ``changelogrevision`` for a node or revision."""
text, sidedata = self._revisiondata(nodeorrev)
return changelogrevision(
- text, sidedata, self._copiesstorage == b'changeset-sidedata'
+ self, text, sidedata, self._copiesstorage == b'changeset-sidedata'
)
- def readfiles(self, node):
+ def readfiles(self, nodeorrev):
"""
short version of read that only returns the files modified by the cset
"""
- text = self.revision(node)
+ text = self.revision(nodeorrev)
if not text:
return []
last = text.index(b"\n\n")
@@ -592,21 +603,21 @@
parseddate = b"%s %s" % (parseddate, extra)
l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
text = b"\n".join(l)
- return self.addrevision(
+ rev = self.addrevision(
text, transaction, len(self), p1, p2, sidedata=sidedata, flags=flags
)
+ return self.node(rev)
def branchinfo(self, rev):
"""return the branch name and open/close state of a revision
This function exists because creating a changectx object
just to access this is costly."""
- extra = self.changelogrevision(rev).extra
- return encoding.tolocal(extra.get(b"branch")), b'close' in extra
+ return self.changelogrevision(rev).branchinfo
- def _nodeduplicatecallback(self, transaction, node):
+ def _nodeduplicatecallback(self, transaction, rev):
# keep track of revisions that got "re-added", eg: unbunde of know rev.
#
# We track them in a list to preserve their order from the source bundle
duplicates = transaction.changes.setdefault(b'revduplicates', [])
- duplicates.append(self.rev(node))
+ duplicates.append(rev)
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/cmdutil.py
--- a/mercurial/cmdutil.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/cmdutil.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# cmdutil.py - help for command processing in mercurial
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -16,6 +16,7 @@
from .node import (
hex,
nullid,
+ nullrev,
short,
)
from .pycompat import (
@@ -1936,12 +1937,12 @@
ui.debug(b'message:\n%s\n' % (message or b''))
if len(parents) == 1:
- parents.append(repo[nullid])
+ parents.append(repo[nullrev])
if opts.get(b'exact'):
if not nodeid or not p1:
raise error.InputError(_(b'not a Mercurial patch'))
p1 = repo[p1]
- p2 = repo[p2 or nullid]
+ p2 = repo[p2 or nullrev]
elif p2:
try:
p1 = repo[p1]
@@ -1951,10 +1952,10 @@
# first parent.
if p1 != parents[0]:
p1 = parents[0]
- p2 = repo[nullid]
+ p2 = repo[nullrev]
except error.RepoError:
p1, p2 = parents
- if p2.node() == nullid:
+ if p2.rev() == nullrev:
ui.warn(
_(
b"warning: import the patch as a normal revision\n"
@@ -2967,20 +2968,6 @@
# Reroute the working copy parent to the new changeset
repo.setparents(newid, nullid)
- mapping = {old.node(): (newid,)}
- obsmetadata = None
- if opts.get(b'note'):
- obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
- backup = ui.configbool(b'rewrite', b'backup-bundle')
- scmutil.cleanupnodes(
- repo,
- mapping,
- b'amend',
- metadata=obsmetadata,
- fixphase=True,
- targetphase=commitphase,
- backup=backup,
- )
# Fixing the dirstate because localrepo.commitctx does not update
# it. This is rather convenient because we did not need to update
@@ -3003,6 +2990,21 @@
for f in removedfiles:
dirstate.drop(f)
+ mapping = {old.node(): (newid,)}
+ obsmetadata = None
+ if opts.get(b'note'):
+ obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
+ backup = ui.configbool(b'rewrite', b'backup-bundle')
+ scmutil.cleanupnodes(
+ repo,
+ mapping,
+ b'amend',
+ metadata=obsmetadata,
+ fixphase=True,
+ targetphase=commitphase,
+ backup=backup,
+ )
+
return newid
@@ -3774,7 +3776,7 @@
raise error.StateError(state.msg(), hint=state.hint())
for s in statemod._unfinishedstates:
- if s._opname == b'merge' or state._reportonly:
+ if s._opname == b'merge' or s._reportonly:
continue
if s._clearable and s.isunfinished(repo):
util.unlink(repo.vfs.join(s._fname))
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/commands.py
--- a/mercurial/commands.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/commands.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# commands.py - command processing for mercurial
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -74,8 +74,15 @@
from .utils import (
dateutil,
stringutil,
+ urlutil,
)
+if pycompat.TYPE_CHECKING:
+ from typing import (
+ List,
+ )
+
+
table = {}
table.update(debugcommandsmod.command._table)
@@ -1107,9 +1114,8 @@
transition = b"bad"
state[transition].append(node)
ctx = repo[node]
- ui.status(
- _(b'changeset %d:%s: %s\n') % (ctx.rev(), ctx, transition)
- )
+ summary = cmdutil.format_changeset_summary(ui, ctx, b'bisect')
+ ui.status(_(b'changeset %s: %s\n') % (summary, transition))
hbisect.checkstate(state)
# bisect
nodes, changesets, bgood = hbisect.bisect(repo, state)
@@ -1125,15 +1131,15 @@
nodes, changesets, good = hbisect.bisect(repo, state)
if extend:
if not changesets:
- extendnode = hbisect.extendrange(repo, state, nodes, good)
- if extendnode is not None:
+ extendctx = hbisect.extendrange(repo, state, nodes, good)
+ if extendctx is not None:
ui.write(
- _(b"Extending search to changeset %d:%s\n")
- % (extendnode.rev(), extendnode)
+ _(b"Extending search to changeset %s\n")
+ % cmdutil.format_changeset_summary(ui, extendctx, b'bisect')
)
- state[b'current'] = [extendnode.node()]
+ state[b'current'] = [extendctx.node()]
hbisect.save_state(repo, state)
- return mayupdate(repo, extendnode.node())
+ return mayupdate(repo, extendctx.node())
raise error.StateError(_(b"nothing to extend"))
if changesets == 0:
@@ -1146,12 +1152,13 @@
while size <= changesets:
tests, size = tests + 1, size * 2
rev = repo.changelog.rev(node)
+ summary = cmdutil.format_changeset_summary(ui, repo[rev], b'bisect')
ui.write(
_(
- b"Testing changeset %d:%s "
+ b"Testing changeset %s "
b"(%d changesets remaining, ~%d tests)\n"
)
- % (rev, short(node), changesets, tests)
+ % (summary, changesets, tests)
)
state[b'current'] = [node]
hbisect.save_state(repo, state)
@@ -1524,10 +1531,10 @@
),
]
+ remoteopts,
- _(b'[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'),
+ _(b'[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]...'),
helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
-def bundle(ui, repo, fname, dest=None, **opts):
+def bundle(ui, repo, fname, *dests, **opts):
"""create a bundle file
Generate a bundle file containing data to be transferred to another
@@ -1538,7 +1545,7 @@
all the nodes you specify with --base parameters. Otherwise, hg
will assume the repository has all the nodes in destination, or
default-push/default if no destination is specified, where destination
- is the repository you provide through DEST option.
+ is the repositories you provide through DEST option.
You can change bundle format with the -t/--type option. See
:hg:`help bundlespec` for documentation on this format. By default,
@@ -1583,9 +1590,9 @@
)
if opts.get(b'all'):
- if dest:
+ if dests:
raise error.InputError(
- _(b"--all is incompatible with specifying a destination")
+ _(b"--all is incompatible with specifying destinations")
)
if opts.get(b'base'):
ui.warn(_(b"ignoring --base because --all was specified\n"))
@@ -1598,31 +1605,54 @@
)
if base:
- if dest:
+ if dests:
raise error.InputError(
- _(b"--base is incompatible with specifying a destination")
+ _(b"--base is incompatible with specifying destinations")
)
common = [repo[rev].node() for rev in base]
heads = [repo[r].node() for r in revs] if revs else None
outgoing = discovery.outgoing(repo, common, heads)
+ missing = outgoing.missing
+ excluded = outgoing.excluded
else:
- dest = ui.expandpath(dest or b'default-push', dest or b'default')
- dest, branches = hg.parseurl(dest, opts.get(b'branch'))
- other = hg.peer(repo, opts, dest)
- revs = [repo[r].hex() for r in revs]
- revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
- heads = revs and pycompat.maplist(repo.lookup, revs) or revs
- outgoing = discovery.findcommonoutgoing(
- repo,
- other,
- onlyheads=heads,
- force=opts.get(b'force'),
- portable=True,
+ missing = set()
+ excluded = set()
+ for path in urlutil.get_push_paths(repo, ui, dests):
+ other = hg.peer(repo, opts, path.rawloc)
+ if revs is not None:
+ hex_revs = [repo[r].hex() for r in revs]
+ else:
+ hex_revs = None
+ branches = (path.branch, [])
+ head_revs, checkout = hg.addbranchrevs(
+ repo, repo, branches, hex_revs
+ )
+ heads = (
+ head_revs
+ and pycompat.maplist(repo.lookup, head_revs)
+ or head_revs
+ )
+ outgoing = discovery.findcommonoutgoing(
+ repo,
+ other,
+ onlyheads=heads,
+ force=opts.get(b'force'),
+ portable=True,
+ )
+ missing.update(outgoing.missing)
+ excluded.update(outgoing.excluded)
+
+ if not missing:
+ scmutil.nochangesfound(ui, repo, not base and excluded)
+ return 1
+
+ if heads:
+ outgoing = discovery.outgoing(
+ repo, missingroots=missing, ancestorsof=heads
)
-
- if not outgoing.missing:
- scmutil.nochangesfound(ui, repo, not base and outgoing.excluded)
- return 1
+ else:
+ outgoing = discovery.outgoing(repo, missingroots=missing)
+ outgoing.excluded = sorted(excluded)
if cgversion == b'01': # bundle1
bversion = b'HG10' + bundlespec.wirecompression
@@ -1648,6 +1678,14 @@
if complevel is not None:
compopts[b'level'] = complevel
+ compthreads = ui.configint(
+ b'experimental', b'bundlecompthreads.' + bundlespec.compression
+ )
+ if compthreads is None:
+ compthreads = ui.configint(b'experimental', b'bundlecompthreads')
+ if compthreads is not None:
+ compopts[b'threads'] = compthreads
+
# Bundling of obsmarker and phases is optional as not all clients
# support the necessary features.
cfg = ui.configbool
@@ -2399,7 +2437,8 @@
To undo marking a destination file as copied, use --forget. With that
option, all given (positional) arguments are unmarked as copies. The
- destination file(s) will be left in place (still tracked).
+ destination file(s) will be left in place (still tracked). Note that
+ :hg:`copy --forget` behaves the same way as :hg:`rename --forget`.
This command takes effect with the next commit by default.
@@ -2550,7 +2589,7 @@
if change:
repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn')
ctx2 = scmutil.revsingle(repo, change, None)
- ctx1 = ctx2.p1()
+ ctx1 = logcmdutil.diff_parent(ctx2)
elif from_rev or to_rev:
repo = scmutil.unhidehashlikerevs(
repo, [from_rev] + [to_rev], b'nowarn'
@@ -3287,7 +3326,8 @@
)
# checking that newnodes exist because old state files won't have it
elif statedata.get(b'newnodes') is not None:
- statedata[b'newnodes'].append(node)
+ nn = statedata[b'newnodes'] # type: List[bytes]
+ nn.append(node)
# remove state when we complete successfully
if not opts.get(b'dry_run'):
@@ -3821,132 +3861,140 @@
output = []
revs = []
- if source:
- source, branches = hg.parseurl(ui.expandpath(source))
- peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
- repo = peer.local()
- revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
-
- fm = ui.formatter(b'identify', opts)
- fm.startitem()
-
- if not repo:
- if num or branch or tags:
- raise error.InputError(
- _(b"can't query remote revision number, branch, or tags")
+ peer = None
+ try:
+ if source:
+ source, branches = urlutil.get_unique_pull_path(
+ b'identify', repo, ui, source
)
- if not rev and revs:
- rev = revs[0]
- if not rev:
- rev = b"tip"
-
- remoterev = peer.lookup(rev)
- hexrev = fm.hexfunc(remoterev)
- if default or id:
- output = [hexrev]
- fm.data(id=hexrev)
-
- @util.cachefunc
- def getbms():
- bms = []
-
- if b'bookmarks' in peer.listkeys(b'namespaces'):
- hexremoterev = hex(remoterev)
- bms = [
- bm
- for bm, bmr in pycompat.iteritems(
- peer.listkeys(b'bookmarks')
+ # only pass ui when no repo
+ peer = hg.peer(repo or ui, opts, source)
+ repo = peer.local()
+ revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
+
+ fm = ui.formatter(b'identify', opts)
+ fm.startitem()
+
+ if not repo:
+ if num or branch or tags:
+ raise error.InputError(
+ _(b"can't query remote revision number, branch, or tags")
+ )
+ if not rev and revs:
+ rev = revs[0]
+ if not rev:
+ rev = b"tip"
+
+ remoterev = peer.lookup(rev)
+ hexrev = fm.hexfunc(remoterev)
+ if default or id:
+ output = [hexrev]
+ fm.data(id=hexrev)
+
+ @util.cachefunc
+ def getbms():
+ bms = []
+
+ if b'bookmarks' in peer.listkeys(b'namespaces'):
+ hexremoterev = hex(remoterev)
+ bms = [
+ bm
+ for bm, bmr in pycompat.iteritems(
+ peer.listkeys(b'bookmarks')
+ )
+ if bmr == hexremoterev
+ ]
+
+ return sorted(bms)
+
+ if fm.isplain():
+ if bookmarks:
+ output.extend(getbms())
+ elif default and not ui.quiet:
+ # multiple bookmarks for a single parent separated by '/'
+ bm = b'/'.join(getbms())
+ if bm:
+ output.append(bm)
+ else:
+ fm.data(node=hex(remoterev))
+ if bookmarks or b'bookmarks' in fm.datahint():
+ fm.data(bookmarks=fm.formatlist(getbms(), name=b'bookmark'))
+ else:
+ if rev:
+ repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
+ ctx = scmutil.revsingle(repo, rev, None)
+
+ if ctx.rev() is None:
+ ctx = repo[None]
+ parents = ctx.parents()
+ taglist = []
+ for p in parents:
+ taglist.extend(p.tags())
+
+ dirty = b""
+ if ctx.dirty(missing=True, merge=False, branch=False):
+ dirty = b'+'
+ fm.data(dirty=dirty)
+
+ hexoutput = [fm.hexfunc(p.node()) for p in parents]
+ if default or id:
+ output = [b"%s%s" % (b'+'.join(hexoutput), dirty)]
+ fm.data(id=b"%s%s" % (b'+'.join(hexoutput), dirty))
+
+ if num:
+ numoutput = [b"%d" % p.rev() for p in parents]
+ output.append(b"%s%s" % (b'+'.join(numoutput), dirty))
+
+ fm.data(
+ parents=fm.formatlist(
+ [fm.hexfunc(p.node()) for p in parents], name=b'node'
)
- if bmr == hexremoterev
- ]
-
- return sorted(bms)
-
- if fm.isplain():
- if bookmarks:
- output.extend(getbms())
- elif default and not ui.quiet:
+ )
+ else:
+ hexoutput = fm.hexfunc(ctx.node())
+ if default or id:
+ output = [hexoutput]
+ fm.data(id=hexoutput)
+
+ if num:
+ output.append(pycompat.bytestr(ctx.rev()))
+ taglist = ctx.tags()
+
+ if default and not ui.quiet:
+ b = ctx.branch()
+ if b != b'default':
+ output.append(b"(%s)" % b)
+
+ # multiple tags for a single parent separated by '/'
+ t = b'/'.join(taglist)
+ if t:
+ output.append(t)
+
# multiple bookmarks for a single parent separated by '/'
- bm = b'/'.join(getbms())
+ bm = b'/'.join(ctx.bookmarks())
if bm:
output.append(bm)
- else:
- fm.data(node=hex(remoterev))
- if bookmarks or b'bookmarks' in fm.datahint():
- fm.data(bookmarks=fm.formatlist(getbms(), name=b'bookmark'))
- else:
- if rev:
- repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
- ctx = scmutil.revsingle(repo, rev, None)
-
- if ctx.rev() is None:
- ctx = repo[None]
- parents = ctx.parents()
- taglist = []
- for p in parents:
- taglist.extend(p.tags())
-
- dirty = b""
- if ctx.dirty(missing=True, merge=False, branch=False):
- dirty = b'+'
- fm.data(dirty=dirty)
-
- hexoutput = [fm.hexfunc(p.node()) for p in parents]
- if default or id:
- output = [b"%s%s" % (b'+'.join(hexoutput), dirty)]
- fm.data(id=b"%s%s" % (b'+'.join(hexoutput), dirty))
-
- if num:
- numoutput = [b"%d" % p.rev() for p in parents]
- output.append(b"%s%s" % (b'+'.join(numoutput), dirty))
-
- fm.data(
- parents=fm.formatlist(
- [fm.hexfunc(p.node()) for p in parents], name=b'node'
- )
- )
- else:
- hexoutput = fm.hexfunc(ctx.node())
- if default or id:
- output = [hexoutput]
- fm.data(id=hexoutput)
-
- if num:
- output.append(pycompat.bytestr(ctx.rev()))
- taglist = ctx.tags()
-
- if default and not ui.quiet:
- b = ctx.branch()
- if b != b'default':
- output.append(b"(%s)" % b)
-
- # multiple tags for a single parent separated by '/'
- t = b'/'.join(taglist)
- if t:
- output.append(t)
-
- # multiple bookmarks for a single parent separated by '/'
- bm = b'/'.join(ctx.bookmarks())
- if bm:
- output.append(bm)
- else:
- if branch:
- output.append(ctx.branch())
-
- if tags:
- output.extend(taglist)
-
- if bookmarks:
- output.extend(ctx.bookmarks())
-
- fm.data(node=ctx.hex())
- fm.data(branch=ctx.branch())
- fm.data(tags=fm.formatlist(taglist, name=b'tag', sep=b':'))
- fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark'))
- fm.context(ctx=ctx)
-
- fm.plain(b"%s\n" % b' '.join(output))
- fm.end()
+ else:
+ if branch:
+ output.append(ctx.branch())
+
+ if tags:
+ output.extend(taglist)
+
+ if bookmarks:
+ output.extend(ctx.bookmarks())
+
+ fm.data(node=ctx.hex())
+ fm.data(branch=ctx.branch())
+ fm.data(tags=fm.formatlist(taglist, name=b'tag', sep=b':'))
+ fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark'))
+ fm.context(ctx=ctx)
+
+ fm.plain(b"%s\n" % b' '.join(output))
+ fm.end()
+ finally:
+ if peer:
+ peer.close()
@command(
@@ -4288,22 +4336,22 @@
cmdutil.check_incompatible_arguments(opts, b'subrepos', [b'bundle'])
if opts.get(b'bookmarks'):
- source, branches = hg.parseurl(
- ui.expandpath(source), opts.get(b'branch')
- )
- other = hg.peer(repo, opts, source)
- if b'bookmarks' not in other.listkeys(b'namespaces'):
- ui.warn(_(b"remote doesn't support bookmarks\n"))
- return 0
- ui.pager(b'incoming')
- ui.status(_(b'comparing with %s\n') % util.hidepassword(source))
- return bookmarks.incoming(ui, repo, other)
-
- repo._subtoppath = ui.expandpath(source)
- try:
- return hg.incoming(ui, repo, source, opts)
- finally:
- del repo._subtoppath
+ srcs = urlutil.get_pull_paths(repo, ui, [source], opts.get(b'branch'))
+ for source, branches in srcs:
+ other = hg.peer(repo, opts, source)
+ try:
+ if b'bookmarks' not in other.listkeys(b'namespaces'):
+ ui.warn(_(b"remote doesn't support bookmarks\n"))
+ return 0
+ ui.pager(b'incoming')
+ ui.status(
+ _(b'comparing with %s\n') % urlutil.hidepassword(source)
+ )
+ return bookmarks.incoming(ui, repo, other)
+ finally:
+ other.close()
+
+ return hg.incoming(ui, repo, source, opts)
@command(
@@ -4328,7 +4376,9 @@
Returns 0 on success.
"""
opts = pycompat.byteskwargs(opts)
- hg.peer(ui, opts, ui.expandpath(dest), create=True)
+ path = urlutil.get_clone_path(ui, dest)[1]
+ peer = hg.peer(ui, opts, path, create=True)
+ peer.close()
@command(
@@ -4896,10 +4946,10 @@
+ logopts
+ remoteopts
+ subrepoopts,
- _(b'[-M] [-p] [-n] [-f] [-r REV]... [DEST]'),
+ _(b'[-M] [-p] [-n] [-f] [-r REV]... [DEST]...'),
helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
)
-def outgoing(ui, repo, dest=None, **opts):
+def outgoing(ui, repo, *dests, **opts):
"""show changesets not found in the destination
Show changesets not found in the specified destination repository
@@ -4935,47 +4985,24 @@
Returns 0 if there are outgoing changes, 1 otherwise.
"""
- # hg._outgoing() needs to re-resolve the path in order to handle #branch
- # style URLs, so don't overwrite dest.
- path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
- if not path:
- raise error.ConfigError(
- _(b'default repository not configured!'),
- hint=_(b"see 'hg help config.paths'"),
- )
-
opts = pycompat.byteskwargs(opts)
- if opts.get(b'graph'):
- logcmdutil.checkunsupportedgraphflags([], opts)
- o, other = hg._outgoing(ui, repo, dest, opts)
- if not o:
- cmdutil.outgoinghooks(ui, repo, other, opts, o)
- return
-
- revdag = logcmdutil.graphrevs(repo, o, opts)
- ui.pager(b'outgoing')
- displayer = logcmdutil.changesetdisplayer(ui, repo, opts, buffered=True)
- logcmdutil.displaygraph(
- ui, repo, revdag, displayer, graphmod.asciiedges
- )
- cmdutil.outgoinghooks(ui, repo, other, opts, o)
- return 0
-
if opts.get(b'bookmarks'):
- dest = path.pushloc or path.loc
- other = hg.peer(repo, opts, dest)
- if b'bookmarks' not in other.listkeys(b'namespaces'):
- ui.warn(_(b"remote doesn't support bookmarks\n"))
- return 0
- ui.status(_(b'comparing with %s\n') % util.hidepassword(dest))
- ui.pager(b'outgoing')
- return bookmarks.outgoing(ui, repo, other)
-
- repo._subtoppath = path.pushloc or path.loc
- try:
- return hg.outgoing(ui, repo, dest, opts)
- finally:
- del repo._subtoppath
+ for path in urlutil.get_push_paths(repo, ui, dests):
+ dest = path.pushloc or path.loc
+ other = hg.peer(repo, opts, dest)
+ try:
+ if b'bookmarks' not in other.listkeys(b'namespaces'):
+ ui.warn(_(b"remote doesn't support bookmarks\n"))
+ return 0
+ ui.status(
+ _(b'comparing with %s\n') % urlutil.hidepassword(dest)
+ )
+ ui.pager(b'outgoing')
+ return bookmarks.outgoing(ui, repo, other)
+ finally:
+ other.close()
+
+ return hg.outgoing(ui, repo, dests, opts)
@command(
@@ -5113,7 +5140,7 @@
fm = ui.formatter(b'paths', opts)
if fm.isplain():
- hidepassword = util.hidepassword
+ hidepassword = urlutil.hidepassword
else:
hidepassword = bytes
if ui.quiet:
@@ -5244,9 +5271,11 @@
:optupdate: updating working directory is needed or not
:checkout: update destination revision (or None to default destination)
:brev: a name, which might be a bookmark to be activated after updating
+
+ return True if update raise any conflict, False otherwise.
"""
if modheads == 0:
- return
+ return False
if optupdate:
try:
return hg.updatetotally(ui, repo, checkout, brev)
@@ -5268,6 +5297,7 @@
ui.status(_(b"(run 'hg heads' to see heads)\n"))
elif not ui.configbool(b'commands', b'update.requiredest'):
ui.status(_(b"(run 'hg update' to get a working copy)\n"))
+ return False
@command(
@@ -5308,11 +5338,11 @@
),
]
+ remoteopts,
- _(b'[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'),
+ _(b'[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]...'),
helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
helpbasic=True,
)
-def pull(ui, repo, source=b"default", **opts):
+def pull(ui, repo, *sources, **opts):
"""pull changes from the specified source
Pull changes from a remote repository to a local one.
@@ -5336,6 +5366,10 @@
If SOURCE is omitted, the 'default' path will be used.
See :hg:`help urls` for more information.
+ If multiple sources are specified, they will be pulled sequentially as if
+ the command was run multiple time. If --update is specify and the command
+ will stop at the first failed --update.
+
Specifying bookmark as ``.`` is equivalent to specifying the active
bookmark's name.
@@ -5350,101 +5384,211 @@
hint = _(b'use hg pull followed by hg update DEST')
raise error.InputError(msg, hint=hint)
- source, branches = hg.parseurl(ui.expandpath(source), opts.get(b'branch'))
- ui.status(_(b'pulling from %s\n') % util.hidepassword(source))
- ui.flush()
- other = hg.peer(repo, opts, source)
- try:
- revs, checkout = hg.addbranchrevs(
- repo, other, branches, opts.get(b'rev')
- )
-
- pullopargs = {}
-
- nodes = None
- if opts.get(b'bookmark') or revs:
- # The list of bookmark used here is the same used to actually update
- # the bookmark names, to avoid the race from issue 4689 and we do
- # all lookup and bookmark queries in one go so they see the same
- # version of the server state (issue 4700).
- nodes = []
- fnodes = []
- revs = revs or []
- if revs and not other.capable(b'lookup'):
- err = _(
- b"other repository doesn't support revision lookup, "
- b"so a rev cannot be specified."
- )
- raise error.Abort(err)
- with other.commandexecutor() as e:
- fremotebookmarks = e.callcommand(
- b'listkeys', {b'namespace': b'bookmarks'}
- )
- for r in revs:
- fnodes.append(e.callcommand(b'lookup', {b'key': r}))
- remotebookmarks = fremotebookmarks.result()
- remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks)
- pullopargs[b'remotebookmarks'] = remotebookmarks
- for b in opts.get(b'bookmark', []):
- b = repo._bookmarks.expandname(b)
- if b not in remotebookmarks:
- raise error.InputError(
- _(b'remote bookmark %s not found!') % b
+ sources = urlutil.get_pull_paths(repo, ui, sources, opts.get(b'branch'))
+ for source, branches in sources:
+ ui.status(_(b'pulling from %s\n') % urlutil.hidepassword(source))
+ ui.flush()
+ other = hg.peer(repo, opts, source)
+ update_conflict = None
+ try:
+ revs, checkout = hg.addbranchrevs(
+ repo, other, branches, opts.get(b'rev')
+ )
+
+ pullopargs = {}
+
+ nodes = None
+ if opts.get(b'bookmark') or revs:
+ # The list of bookmark used here is the same used to actually update
+ # the bookmark names, to avoid the race from issue 4689 and we do
+ # all lookup and bookmark queries in one go so they see the same
+ # version of the server state (issue 4700).
+ nodes = []
+ fnodes = []
+ revs = revs or []
+ if revs and not other.capable(b'lookup'):
+ err = _(
+ b"other repository doesn't support revision lookup, "
+ b"so a rev cannot be specified."
+ )
+ raise error.Abort(err)
+ with other.commandexecutor() as e:
+ fremotebookmarks = e.callcommand(
+ b'listkeys', {b'namespace': b'bookmarks'}
+ )
+ for r in revs:
+ fnodes.append(e.callcommand(b'lookup', {b'key': r}))
+ remotebookmarks = fremotebookmarks.result()
+ remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks)
+ pullopargs[b'remotebookmarks'] = remotebookmarks
+ for b in opts.get(b'bookmark', []):
+ b = repo._bookmarks.expandname(b)
+ if b not in remotebookmarks:
+ raise error.InputError(
+ _(b'remote bookmark %s not found!') % b
+ )
+ nodes.append(remotebookmarks[b])
+ for i, rev in enumerate(revs):
+ node = fnodes[i].result()
+ nodes.append(node)
+ if rev == checkout:
+ checkout = node
+
+ wlock = util.nullcontextmanager()
+ if opts.get(b'update'):
+ wlock = repo.wlock()
+ with wlock:
+ pullopargs.update(opts.get(b'opargs', {}))
+ modheads = exchange.pull(
+ repo,
+ other,
+ heads=nodes,
+ force=opts.get(b'force'),
+ bookmarks=opts.get(b'bookmark', ()),
+ opargs=pullopargs,
+ confirm=opts.get(b'confirm'),
+ ).cgresult
+
+ # brev is a name, which might be a bookmark to be activated at
+ # the end of the update. In other words, it is an explicit
+ # destination of the update
+ brev = None
+
+ if checkout:
+ checkout = repo.unfiltered().changelog.rev(checkout)
+
+ # order below depends on implementation of
+ # hg.addbranchrevs(). opts['bookmark'] is ignored,
+ # because 'checkout' is determined without it.
+ if opts.get(b'rev'):
+ brev = opts[b'rev'][0]
+ elif opts.get(b'branch'):
+ brev = opts[b'branch'][0]
+ else:
+ brev = branches[0]
+ repo._subtoppath = source
+ try:
+ update_conflict = postincoming(
+ ui, repo, modheads, opts.get(b'update'), checkout, brev
)
- nodes.append(remotebookmarks[b])
- for i, rev in enumerate(revs):
- node = fnodes[i].result()
- nodes.append(node)
- if rev == checkout:
- checkout = node
-
- wlock = util.nullcontextmanager()
- if opts.get(b'update'):
- wlock = repo.wlock()
- with wlock:
- pullopargs.update(opts.get(b'opargs', {}))
- modheads = exchange.pull(
- repo,
- other,
- heads=nodes,
- force=opts.get(b'force'),
- bookmarks=opts.get(b'bookmark', ()),
- opargs=pullopargs,
- confirm=opts.get(b'confirm'),
- ).cgresult
-
- # brev is a name, which might be a bookmark to be activated at
- # the end of the update. In other words, it is an explicit
- # destination of the update
- brev = None
-
- if checkout:
- checkout = repo.unfiltered().changelog.rev(checkout)
-
- # order below depends on implementation of
- # hg.addbranchrevs(). opts['bookmark'] is ignored,
- # because 'checkout' is determined without it.
- if opts.get(b'rev'):
- brev = opts[b'rev'][0]
- elif opts.get(b'branch'):
- brev = opts[b'branch'][0]
- else:
- brev = branches[0]
- repo._subtoppath = source
- try:
- ret = postincoming(
- ui, repo, modheads, opts.get(b'update'), checkout, brev
- )
- except error.FilteredRepoLookupError as exc:
- msg = _(b'cannot update to target: %s') % exc.args[0]
- exc.args = (msg,) + exc.args[1:]
- raise
- finally:
- del repo._subtoppath
-
- finally:
- other.close()
- return ret
+ except error.FilteredRepoLookupError as exc:
+ msg = _(b'cannot update to target: %s') % exc.args[0]
+ exc.args = (msg,) + exc.args[1:]
+ raise
+ finally:
+ del repo._subtoppath
+
+ finally:
+ other.close()
+ # skip the remaining pull source if they are some conflict.
+ if update_conflict:
+ break
+ if update_conflict:
+ return 1
+ else:
+ return 0
+
+
+@command(
+ b'purge|clean',
+ [
+ (b'a', b'abort-on-err', None, _(b'abort if an error occurs')),
+ (b'', b'all', None, _(b'purge ignored files too')),
+ (b'i', b'ignored', None, _(b'purge only ignored files')),
+ (b'', b'dirs', None, _(b'purge empty directories')),
+ (b'', b'files', None, _(b'purge files')),
+ (b'p', b'print', None, _(b'print filenames instead of deleting them')),
+ (
+ b'0',
+ b'print0',
+ None,
+ _(
+ b'end filenames with NUL, for use with xargs'
+ b' (implies -p/--print)'
+ ),
+ ),
+ (b'', b'confirm', None, _(b'ask before permanently deleting files')),
+ ]
+ + cmdutil.walkopts,
+ _(b'hg purge [OPTION]... [DIR]...'),
+ helpcategory=command.CATEGORY_WORKING_DIRECTORY,
+)
+def purge(ui, repo, *dirs, **opts):
+ """removes files not tracked by Mercurial
+
+ Delete files not known to Mercurial. This is useful to test local
+ and uncommitted changes in an otherwise-clean source tree.
+
+ This means that purge will delete the following by default:
+
+ - Unknown files: files marked with "?" by :hg:`status`
+ - Empty directories: in fact Mercurial ignores directories unless
+ they contain files under source control management
+
+ But it will leave untouched:
+
+ - Modified and unmodified tracked files
+ - Ignored files (unless -i or --all is specified)
+ - New files added to the repository (with :hg:`add`)
+
+ The --files and --dirs options can be used to direct purge to delete
+ only files, only directories, or both. If neither option is given,
+ both will be deleted.
+
+ If directories are given on the command line, only files in these
+ directories are considered.
+
+ Be careful with purge, as you could irreversibly delete some files
+ you forgot to add to the repository. If you only want to print the
+ list of files that this program would delete, use the --print
+ option.
+ """
+ opts = pycompat.byteskwargs(opts)
+ cmdutil.check_at_most_one_arg(opts, b'all', b'ignored')
+
+ act = not opts.get(b'print')
+ eol = b'\n'
+ if opts.get(b'print0'):
+ eol = b'\0'
+ act = False # --print0 implies --print
+ if opts.get(b'all', False):
+ ignored = True
+ unknown = True
+ else:
+ ignored = opts.get(b'ignored', False)
+ unknown = not ignored
+
+ removefiles = opts.get(b'files')
+ removedirs = opts.get(b'dirs')
+ confirm = opts.get(b'confirm')
+ if confirm is None:
+ try:
+ extensions.find(b'purge')
+ confirm = False
+ except KeyError:
+ confirm = True
+
+ if not removefiles and not removedirs:
+ removefiles = True
+ removedirs = True
+
+ match = scmutil.match(repo[None], dirs, opts)
+
+ paths = mergemod.purge(
+ repo,
+ match,
+ unknown=unknown,
+ ignored=ignored,
+ removeemptydirs=removedirs,
+ removefiles=removefiles,
+ abortonerror=opts.get(b'abort_on_err'),
+ noop=not act,
+ confirm=confirm,
+ )
+
+ for path in paths:
+ if not act:
+ ui.write(b'%s%s' % (path, eol))
@command(
@@ -5482,11 +5626,11 @@
),
]
+ remoteopts,
- _(b'[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'),
+ _(b'[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]...'),
helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
helpbasic=True,
)
-def push(ui, repo, dest=None, **opts):
+def push(ui, repo, *dests, **opts):
"""push changes to the specified destination
Push changesets from the local repository to the specified
@@ -5522,6 +5666,9 @@
Please see :hg:`help urls` for important details about ``ssh://``
URLs. If DESTINATION is omitted, a default path will be used.
+ When passed multiple destinations, push will process them one after the
+ other, but stop should an error occur.
+
.. container:: verbose
The --pushvars option sends strings to the server that become
@@ -5566,75 +5713,89 @@
# this lets simultaneous -r, -b options continue working
opts.setdefault(b'rev', []).append(b"null")
- path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
- if not path:
- raise error.ConfigError(
- _(b'default repository not configured!'),
- hint=_(b"see 'hg help config.paths'"),
- )
- dest = path.pushloc or path.loc
- branches = (path.branch, opts.get(b'branch') or [])
- ui.status(_(b'pushing to %s\n') % util.hidepassword(dest))
- revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get(b'rev'))
- other = hg.peer(repo, opts, dest)
-
- if revs:
- revs = [repo[r].node() for r in scmutil.revrange(repo, revs)]
- if not revs:
- raise error.InputError(
- _(b"specified revisions evaluate to an empty set"),
- hint=_(b"use different revision arguments"),
- )
- elif path.pushrev:
- # It doesn't make any sense to specify ancestor revisions. So limit
- # to DAG heads to make discovery simpler.
- expr = revsetlang.formatspec(b'heads(%r)', path.pushrev)
- revs = scmutil.revrange(repo, [expr])
- revs = [repo[rev].node() for rev in revs]
- if not revs:
- raise error.InputError(
- _(b'default push revset for path evaluates to an empty set')
- )
- elif ui.configbool(b'commands', b'push.require-revs'):
- raise error.InputError(
- _(b'no revisions specified to push'),
- hint=_(b'did you mean "hg push -r ."?'),
+ some_pushed = False
+ result = 0
+ for path in urlutil.get_push_paths(repo, ui, dests):
+ dest = path.pushloc or path.loc
+ branches = (path.branch, opts.get(b'branch') or [])
+ ui.status(_(b'pushing to %s\n') % urlutil.hidepassword(dest))
+ revs, checkout = hg.addbranchrevs(
+ repo, repo, branches, opts.get(b'rev')
)
-
- repo._subtoppath = dest
- try:
- # push subrepos depth-first for coherent ordering
- c = repo[b'.']
- subs = c.substate # only repos that are committed
- for s in sorted(subs):
- result = c.sub(s).push(opts)
- if result == 0:
- return not result
- finally:
- del repo._subtoppath
-
- opargs = dict(opts.get(b'opargs', {})) # copy opargs since we may mutate it
- opargs.setdefault(b'pushvars', []).extend(opts.get(b'pushvars', []))
-
- pushop = exchange.push(
- repo,
- other,
- opts.get(b'force'),
- revs=revs,
- newbranch=opts.get(b'new_branch'),
- bookmarks=opts.get(b'bookmark', ()),
- publish=opts.get(b'publish'),
- opargs=opargs,
- )
-
- result = not pushop.cgresult
-
- if pushop.bkresult is not None:
- if pushop.bkresult == 2:
- result = 2
- elif not result and pushop.bkresult:
- result = 2
-
+ other = hg.peer(repo, opts, dest)
+
+ try:
+ if revs:
+ revs = [repo[r].node() for r in scmutil.revrange(repo, revs)]
+ if not revs:
+ raise error.InputError(
+ _(b"specified revisions evaluate to an empty set"),
+ hint=_(b"use different revision arguments"),
+ )
+ elif path.pushrev:
+ # It doesn't make any sense to specify ancestor revisions. So limit
+ # to DAG heads to make discovery simpler.
+ expr = revsetlang.formatspec(b'heads(%r)', path.pushrev)
+ revs = scmutil.revrange(repo, [expr])
+ revs = [repo[rev].node() for rev in revs]
+ if not revs:
+ raise error.InputError(
+ _(
+ b'default push revset for path evaluates to an empty set'
+ )
+ )
+ elif ui.configbool(b'commands', b'push.require-revs'):
+ raise error.InputError(
+ _(b'no revisions specified to push'),
+ hint=_(b'did you mean "hg push -r ."?'),
+ )
+
+ repo._subtoppath = dest
+ try:
+ # push subrepos depth-first for coherent ordering
+ c = repo[b'.']
+ subs = c.substate # only repos that are committed
+ for s in sorted(subs):
+ sub_result = c.sub(s).push(opts)
+ if sub_result == 0:
+ return 1
+ finally:
+ del repo._subtoppath
+
+ opargs = dict(
+ opts.get(b'opargs', {})
+ ) # copy opargs since we may mutate it
+ opargs.setdefault(b'pushvars', []).extend(opts.get(b'pushvars', []))
+
+ pushop = exchange.push(
+ repo,
+ other,
+ opts.get(b'force'),
+ revs=revs,
+ newbranch=opts.get(b'new_branch'),
+ bookmarks=opts.get(b'bookmark', ()),
+ publish=opts.get(b'publish'),
+ opargs=opargs,
+ )
+
+ if pushop.cgresult == 0:
+ result = 1
+ elif pushop.cgresult is not None:
+ some_pushed = True
+
+ if pushop.bkresult is not None:
+ if pushop.bkresult == 2:
+ result = 2
+ elif not result and pushop.bkresult:
+ result = 2
+
+ if result:
+ break
+
+ finally:
+ other.close()
+ if result == 0 and not some_pushed:
+ result = 1
return result
@@ -5740,6 +5901,7 @@
@command(
b'rename|move|mv',
[
+ (b'', b'forget', None, _(b'unmark a destination file as renamed')),
(b'A', b'after', None, _(b'record a rename that has already occurred')),
(
b'',
@@ -5771,8 +5933,13 @@
exist in the working directory. If invoked with -A/--after, the
operation is recorded, but no copying is performed.
- This command takes effect at the next commit. To undo a rename
- before that, see :hg:`revert`.
+ To undo marking a destination file as renamed, use --forget. With that
+ option, all given (positional) arguments are unmarked as renames. The
+ destination file(s) will be left in place (still tracked). The source
+ file(s) will not be restored. Note that :hg:`rename --forget` behaves
+ the same way as :hg:`copy --forget`.
+
+ This command takes effect with the next commit by default.
Returns 0 on success, 1 if errors are encountered.
"""
@@ -6083,7 +6250,7 @@
if hint:
ui.warn(hint)
- unresolvedf = list(ms.unresolved())
+ unresolvedf = ms.unresolvedcount()
if not unresolvedf:
ui.status(_(b'(no more unresolved files)\n'))
cmdutil.checkafterresolved(repo)
@@ -7043,7 +7210,12 @@
return
def getincoming():
- source, branches = hg.parseurl(ui.expandpath(b'default'))
+ # XXX We should actually skip this if no default is specified, instead
+ # of passing "default" which will resolve as "./default/" if no default
+ # path is defined.
+ source, branches = urlutil.get_unique_pull_path(
+ b'summary', repo, ui, b'default'
+ )
sbranch = branches[0]
try:
other = hg.peer(repo, {}, source)
@@ -7054,7 +7226,7 @@
revs, checkout = hg.addbranchrevs(repo, other, branches, None)
if revs:
revs = [other.lookup(rev) for rev in revs]
- ui.debug(b'comparing with %s\n' % util.hidepassword(source))
+ ui.debug(b'comparing with %s\n' % urlutil.hidepassword(source))
repo.ui.pushbuffer()
commoninc = discovery.findcommonincoming(repo, other, heads=revs)
repo.ui.popbuffer()
@@ -7066,9 +7238,22 @@
source = sbranch = sother = commoninc = incoming = None
def getoutgoing():
- dest, branches = hg.parseurl(ui.expandpath(b'default-push', b'default'))
- dbranch = branches[0]
- revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
+ # XXX We should actually skip this if no default is specified, instead
+ # of passing "default" which will resolve as "./default/" if no default
+ # path is defined.
+ d = None
+ if b'default-push' in ui.paths:
+ d = b'default-push'
+ elif b'default' in ui.paths:
+ d = b'default'
+ if d is not None:
+ path = urlutil.get_unique_push_path(b'summary', repo, ui, d)
+ dest = path.pushloc or path.loc
+ dbranch = path.branch
+ else:
+ dest = b'default'
+ dbranch = None
+ revs, checkout = hg.addbranchrevs(repo, repo, (dbranch, []), None)
if source != dest:
try:
dother = hg.peer(repo, {}, dest)
@@ -7076,7 +7261,7 @@
if opts.get(b'remote'):
raise
return dest, dbranch, None, None
- ui.debug(b'comparing with %s\n' % util.hidepassword(dest))
+ ui.debug(b'comparing with %s\n' % urlutil.hidepassword(dest))
elif sother is None:
# there is no explicit destination peer, but source one is invalid
return dest, dbranch, None, None
@@ -7101,6 +7286,12 @@
dest = dbranch = dother = outgoing = None
if opts.get(b'remote'):
+ # Help pytype. --remote sets both `needsincoming` and `needsoutgoing`.
+ # The former always sets `sother` (or raises an exception if it can't);
+ # the latter always sets `outgoing`.
+ assert sother is not None
+ assert outgoing is not None
+
t = []
if incoming:
t.append(_(b'1 or more incoming'))
@@ -7412,7 +7603,7 @@
try:
txnname = b'unbundle'
if not isinstance(gen, bundle2.unbundle20):
- txnname = b'unbundle\n%s' % util.hidepassword(url)
+ txnname = b'unbundle\n%s' % urlutil.hidepassword(url)
with repo.transaction(txnname) as tr:
op = bundle2.applybundle(
repo, gen, tr, source=b'unbundle', url=url
@@ -7428,7 +7619,10 @@
)
modheads = bundle2.combinechangegroupresults(op)
- return postincoming(ui, repo, modheads, opts.get('update'), None, None)
+ if postincoming(ui, repo, modheads, opts.get('update'), None, None):
+ return 1
+ else:
+ return 0
@command(
@@ -7708,7 +7902,7 @@
)
license = _(
b"(see https://mercurial-scm.org for more information)\n"
- b"\nCopyright (C) 2005-2021 Matt Mackall and others\n"
+ b"\nCopyright (C) 2005-2021 Olivia Mackall and others\n"
b"This is free software; see the source for copying conditions. "
b"There is NO\nwarranty; "
b"not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/commandserver.py
--- a/mercurial/commandserver.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/commandserver.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# commandserver.py - communicate with Mercurial's API over a pipe
#
-# Copyright Matt Mackall
+# Copyright Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/commit.py
--- a/mercurial/commit.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/commit.py Tue Apr 20 11:01:06 2021 -0400
@@ -96,6 +96,10 @@
ctx.date(),
extra,
)
+ rev = repo[n].rev()
+ if oldtip != repo.changelog.tiprev():
+ repo.register_changeset(rev, repo.changelog.changelogrevision(rev))
+
xp1, xp2 = p1.hex(), p2 and p2.hex() or b''
repo.hook(
b'pretxncommit',
@@ -108,7 +112,7 @@
targetphase = subrepoutil.newcommitphase(repo.ui, ctx)
# prevent unmarking changesets as public on recommit
- waspublic = oldtip == repo.changelog.tiprev() and not repo[n].phase()
+ waspublic = oldtip == repo.changelog.tiprev() and not repo[rev].phase()
if targetphase and not waspublic:
# retract boundary do not alter parent changeset.
@@ -116,7 +120,7 @@
# be compliant anyway
#
# if minimal phase was 0 we don't need to retract anything
- phases.registernew(repo, tr, targetphase, [repo[n].rev()])
+ phases.registernew(repo, tr, targetphase, [rev])
return n
@@ -357,6 +361,8 @@
elif fparent2 != nullid:
if ms.active() and ms.extras(fname).get(b'filenode-source') == b'other':
fparent1, fparent2 = fparent2, nullid
+ elif ms.active() and ms.extras(fname).get(b'merged') != b'yes':
+ fparent1, fparent2 = fparent1, nullid
# is one parent an ancestor of the other?
else:
fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/config.py
--- a/mercurial/config.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/config.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# config.py - configuration parsing for Mercurial
#
-# Copyright 2009 Matt Mackall and others
+# Copyright 2009 Olivia Mackall and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -22,14 +22,19 @@
class config(object):
def __init__(self, data=None):
+ self._current_source_level = 0
self._data = {}
self._unset = []
if data:
for k in data._data:
self._data[k] = data[k].copy()
- self._source = data._source.copy()
- else:
- self._source = util.cowdict()
+ self._current_source_level = data._current_source_level + 1
+
+ def new_source(self):
+ """increment the source counter
+
+ This is used to define source priority when reading"""
+ self._current_source_level += 1
def copy(self):
return config(self)
@@ -48,45 +53,66 @@
yield d
def update(self, src):
- self._source = self._source.preparewrite()
+ current_level = self._current_source_level
+ current_level += 1
+ max_level = self._current_source_level
for s, n in src._unset:
ds = self._data.get(s, None)
if ds is not None and n in ds:
self._data[s] = ds.preparewrite()
del self._data[s][n]
- del self._source[(s, n)]
for s in src:
ds = self._data.get(s, None)
if ds:
self._data[s] = ds.preparewrite()
else:
self._data[s] = util.cowsortdict()
- self._data[s].update(src._data[s])
- self._source.update(src._source)
+ for k, v in src._data[s].items():
+ value, source, level = v
+ level += current_level
+ max_level = max(level, current_level)
+ self._data[s][k] = (value, source, level)
+ self._current_source_level = max_level
+
+ def _get(self, section, item):
+ return self._data.get(section, {}).get(item)
def get(self, section, item, default=None):
- return self._data.get(section, {}).get(item, default)
+ result = self._get(section, item)
+ if result is None:
+ return default
+ return result[0]
- def backup(self, section, item):
+ def backup(self, section, key):
"""return a tuple allowing restore to reinstall a previous value
The main reason we need it is because it handles the "no data" case.
"""
try:
- value = self._data[section][item]
- source = self.source(section, item)
- return (section, item, value, source)
+ item = self._data[section][key]
except KeyError:
- return (section, item)
+ return (section, key)
+ else:
+ return (section, key) + item
def source(self, section, item):
- return self._source.get((section, item), b"")
+ result = self._get(section, item)
+ if result is None:
+ return b""
+ return result[1]
+
+ def level(self, section, item):
+ result = self._get(section, item)
+ if result is None:
+ return None
+ return result[2]
def sections(self):
return sorted(self._data.keys())
def items(self, section):
- return list(pycompat.iteritems(self._data.get(section, {})))
+ items = pycompat.iteritems(self._data.get(section, {}))
+ return [(k, v[0]) for (k, v) in items]
def set(self, section, item, value, source=b""):
if pycompat.ispy3:
@@ -103,26 +129,31 @@
self._data[section] = util.cowsortdict()
else:
self._data[section] = self._data[section].preparewrite()
- self._data[section][item] = value
- if source:
- self._source = self._source.preparewrite()
- self._source[(section, item)] = source
+ self._data[section][item] = (value, source, self._current_source_level)
+
+ def alter(self, section, key, new_value):
+ """alter a value without altering its source or level
+
+ This method is meant to be used by `ui.fixconfig` only."""
+ item = self._data[section][key]
+ size = len(item)
+ new_item = (new_value,) + item[1:]
+ assert len(new_item) == size
+ self._data[section][key] = new_item
def restore(self, data):
"""restore data returned by self.backup"""
- self._source = self._source.preparewrite()
- if len(data) == 4:
+ if len(data) != 2:
# restore old data
- section, item, value, source = data
+ section, key = data[:2]
+ item = data[2:]
self._data[section] = self._data[section].preparewrite()
- self._data[section][item] = value
- self._source[(section, item)] = source
+ self._data[section][key] = item
else:
# no data before, remove everything
section, item = data
if section in self._data:
self._data[section].pop(item, None)
- self._source.pop((section, item), None)
def parse(self, src, data, sections=None, remap=None, include=None):
sectionre = util.re.compile(br'\[([^\[]+)\]')
@@ -206,6 +237,7 @@
raise error.ConfigError(message, (b"%s:%d" % (src, line)))
def read(self, path, fp=None, sections=None, remap=None):
+ self.new_source()
if not fp:
fp = util.posixfile(path, b'rb')
assert (
@@ -220,6 +252,8 @@
def include(rel, remap, sections):
abs = os.path.normpath(os.path.join(dir, rel))
self.read(abs, remap=remap, sections=sections)
+ # anything after the include has a higher level
+ self.new_source()
self.parse(
path, fp.read(), sections=sections, remap=remap, include=include
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/configitems.py
--- a/mercurial/configitems.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/configitems.py Tue Apr 20 11:01:06 2021 -0400
@@ -570,11 +570,21 @@
default=0,
)
coreconfigitem(
+ b'convert',
+ b'svn.dangerous-set-commit-dates',
+ default=False,
+)
+coreconfigitem(
b'debug',
b'dirstate.delaywrite',
default=0,
)
coreconfigitem(
+ b'debug',
+ b'revlog.verifyposition.changelog',
+ default=b'',
+)
+coreconfigitem(
b'defaults',
b'.*',
default=None,
@@ -610,6 +620,12 @@
b'check-relroot',
default=False,
)
+# Track copy information for all file, not just "added" one (very slow)
+coreconfigitem(
+ b'devel',
+ b'copy-tracing.trace-all-files',
+ default=False,
+)
coreconfigitem(
b'devel',
b'default-date',
@@ -689,6 +705,11 @@
)
coreconfigitem(
b'devel',
+ b'copy-tracing.multi-thread',
+ default=True,
+)
+coreconfigitem(
+ b'devel',
b'debug.extensions',
default=False,
)
@@ -716,6 +737,14 @@
b'discovery.grow-sample',
default=True,
)
+# When discovery.grow-sample.dynamic is True, the default, the sample size is
+# adapted to the shape of the undecided set (it is set to the max of:
+# , len(roots(undecided)), len(heads(undecided)
+coreconfigitem(
+ b'devel',
+ b'discovery.grow-sample.dynamic',
+ default=True,
+)
# discovery.grow-sample.rate control the rate at which the sample grow
coreconfigitem(
b'devel',
@@ -729,8 +758,26 @@
b'discovery.randomize',
default=True,
)
+# Control the initial size of the discovery sample
+coreconfigitem(
+ b'devel',
+ b'discovery.sample-size',
+ default=200,
+)
+# Control the initial size of the discovery for initial change
+coreconfigitem(
+ b'devel',
+ b'discovery.sample-size.initial',
+ default=100,
+)
_registerdiffopts(section=b'diff')
coreconfigitem(
+ b'diff',
+ b'merge',
+ default=False,
+ experimental=True,
+)
+coreconfigitem(
b'email',
b'bcc',
default=None,
@@ -827,6 +874,31 @@
)
coreconfigitem(
b'experimental',
+ b'bundlecompthreads',
+ default=None,
+)
+coreconfigitem(
+ b'experimental',
+ b'bundlecompthreads.bzip2',
+ default=None,
+)
+coreconfigitem(
+ b'experimental',
+ b'bundlecompthreads.gzip',
+ default=None,
+)
+coreconfigitem(
+ b'experimental',
+ b'bundlecompthreads.none',
+ default=None,
+)
+coreconfigitem(
+ b'experimental',
+ b'bundlecompthreads.zstd',
+ default=None,
+)
+coreconfigitem(
+ b'experimental',
b'changegroup3',
default=False,
)
@@ -1235,7 +1307,7 @@
coreconfigitem(
b'format',
b'revlog-compression',
- default=lambda: [b'zlib'],
+ default=lambda: [b'zstd', b'zlib'],
alias=[(b'experimental', b'format.compression')],
)
coreconfigitem(
@@ -1253,10 +1325,36 @@
b'usestore',
default=True,
)
+
+
+def _persistent_nodemap_default():
+ """compute `use-persistent-nodemap` default value
+
+ The feature is disabled unless a fast implementation is available.
+ """
+ from . import policy
+
+ return policy.importrust('revlog') is not None
+
+
coreconfigitem(
b'format',
b'use-persistent-nodemap',
+ default=_persistent_nodemap_default,
+)
+# TODO needs to grow a docket file to at least store the last offset of the data
+# file when rewriting sidedata.
+# Will also need a way of dealing with garbage data if we allow rewriting
+# *existing* sidedata.
+# Exchange-wise, we will also need to do something more efficient than keeping
+# references to the affected revlogs, especially memory-wise when rewriting
+# sidedata.
+# Also... compress the sidedata? (this should be coming very soon)
+coreconfigitem(
+ b'format',
+ b'exp-revlogv2.2',
default=False,
+ experimental=True,
)
coreconfigitem(
b'format',
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/context.py
--- a/mercurial/context.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/context.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# context.py - changeset and file context objects for mercurial
#
-# Copyright 2006, 2007 Matt Mackall
+# Copyright 2006, 2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -2599,6 +2599,7 @@
b'flags': flags,
b'copied': copied,
}
+ util.clearcachedproperty(self, b'_manifest')
def filectx(self, path, filelog=None):
return overlayworkingfilectx(
@@ -2884,7 +2885,7 @@
# "1 < len(self._parents)" can't be used for checking
# existence of the 2nd parent, because "memctx._parents" is
# explicitly initialized by the list, of which length is 2.
- if p2.node() != nullid:
+ if p2.rev() != nullrev:
man2 = p2.manifest()
managing = lambda f: f in man1 or f in man2
else:
@@ -2902,7 +2903,7 @@
return scmutil.status(modified, added, removed, [], [], [], [])
def parents(self):
- if self._parents[1].node() == nullid:
+ if self._parents[1].rev() == nullrev:
return [self._parents[0]]
return self._parents
@@ -2999,7 +3000,7 @@
parents = [repo[p] for p in parents if p is not None]
parents = parents[:]
while len(parents) < 2:
- parents.append(repo[nullid])
+ parents.append(repo[nullrev])
p1, p2 = self._parents = parents
# sanity check to ensure that the reused manifest parents are
@@ -3051,7 +3052,7 @@
# "1 < len(self._parents)" can't be used for checking
# existence of the 2nd parent, because "metadataonlyctx._parents" is
# explicitly initialized by the list, of which length is 2.
- if p2.node() != nullid:
+ if p2.rev() != nullrev:
man2 = p2.manifest()
managing = lambda f: f in man1 or f in man2
else:
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/copies.py
--- a/mercurial/copies.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/copies.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,7 +1,7 @@
# coding: utf8
# copies.py - copy detection for Mercurial
#
-# Copyright 2008 Matt Mackall
+# Copyright 2008 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -59,14 +59,13 @@
# Cases 1, 3, and 5 are then removed by _filter().
for k, v in list(t.items()):
- # remove copies from files that didn't exist
- if v not in src:
+ if k == v: # case 3
del t[k]
- # remove criss-crossed copies
- elif k in src and v in dst:
+ elif v not in src: # case 5
+ # remove copies from files that didn't exist
del t[k]
- # remove copies to files that were then removed
- elif k not in dst:
+ elif k not in dst: # case 1
+ # remove copies to files that were then removed
del t[k]
@@ -150,16 +149,24 @@
# optimization, since the ctx.files() for a merge commit is not correct for
# this comparison.
forwardmissingmatch = match
- if b.p1() == a and b.p2().node() == nullid:
+ if b.p1() == a and b.p2().rev() == nullrev:
filesmatcher = matchmod.exact(b.files())
forwardmissingmatch = matchmod.intersectmatchers(match, filesmatcher)
- missing = _computeforwardmissing(a, b, match=forwardmissingmatch)
+ if repo.ui.configbool(b'devel', b'copy-tracing.trace-all-files'):
+ missing = list(b.walk(match))
+ # _computeforwardmissing(a, b, match=forwardmissingmatch)
+ if debug:
+ dbg(b'debug.copies: searching all files: %d\n' % len(missing))
+ else:
+ missing = _computeforwardmissing(a, b, match=forwardmissingmatch)
+ if debug:
+ dbg(
+ b'debug.copies: missing files to search: %d\n'
+ % len(missing)
+ )
ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True)
- if debug:
- dbg(b'debug.copies: missing files to search: %d\n' % len(missing))
-
for f in sorted(missing):
if debug:
dbg(b'debug.copies: tracing file: %s\n' % f)
@@ -267,6 +274,7 @@
revs = cl.findmissingrevs(common=[a.rev()], heads=[b.rev()])
roots = set()
has_graph_roots = False
+ multi_thread = repo.ui.configbool(b'devel', b'copy-tracing.multi-thread')
# iterate over `only(B, A)`
for r in revs:
@@ -314,7 +322,13 @@
children_count[p] += 1
revinfo = _revinfo_getter(repo, match)
return _combine_changeset_copies(
- revs, children_count, b.rev(), revinfo, match, isancestor
+ revs,
+ children_count,
+ b.rev(),
+ revinfo,
+ match,
+ isancestor,
+ multi_thread,
)
else:
# When not using side-data, we will process the edges "from" the parent.
@@ -339,7 +353,7 @@
def _combine_changeset_copies(
- revs, children_count, targetrev, revinfo, match, isancestor
+ revs, children_count, targetrev, revinfo, match, isancestor, multi_thread
):
"""combine the copies information for each item of iterrevs
@@ -356,7 +370,7 @@
if rustmod is not None:
final_copies = rustmod.combine_changeset_copies(
- list(revs), children_count, targetrev, revinfo, isancestor
+ list(revs), children_count, targetrev, revinfo, multi_thread
)
else:
isancestor = cached_is_ancestor(isancestor)
@@ -427,7 +441,11 @@
# potential filelog related behavior.
assert parent == 2
current_copies = _merge_copies_dict(
- newcopies, current_copies, isancestor, changes
+ newcopies,
+ current_copies,
+ isancestor,
+ changes,
+ current_rev,
)
all_copies[current_rev] = current_copies
@@ -449,7 +467,7 @@
PICK_EITHER = 2
-def _merge_copies_dict(minor, major, isancestor, changes):
+def _merge_copies_dict(minor, major, isancestor, changes, current_merge):
"""merge two copies-mapping together, minor and major
In case of conflict, value from "major" will be picked.
@@ -467,39 +485,75 @@
if other is None:
minor[dest] = value
else:
- pick = _compare_values(changes, isancestor, dest, other, value)
- if pick == PICK_MAJOR:
+ pick, overwrite = _compare_values(
+ changes, isancestor, dest, other, value
+ )
+ if overwrite:
+ if pick == PICK_MAJOR:
+ minor[dest] = (current_merge, value[1])
+ else:
+ minor[dest] = (current_merge, other[1])
+ elif pick == PICK_MAJOR:
minor[dest] = value
return minor
def _compare_values(changes, isancestor, dest, minor, major):
- """compare two value within a _merge_copies_dict loop iteration"""
+ """compare two value within a _merge_copies_dict loop iteration
+
+ return (pick, overwrite).
+
+ - pick is one of PICK_MINOR, PICK_MAJOR or PICK_EITHER
+ - overwrite is True if pick is a return of an ambiguity that needs resolution.
+ """
major_tt, major_value = major
minor_tt, minor_value = minor
- # evacuate some simple case first:
if major_tt == minor_tt:
# if it comes from the same revision it must be the same value
assert major_value == minor_value
- return PICK_EITHER
- elif major[1] == minor[1]:
- return PICK_EITHER
-
- # actual merging needed: content from "major" wins, unless it is older than
- # the branch point or there is a merge
- elif changes is not None and major[1] is None and dest in changes.salvaged:
- return PICK_MINOR
- elif changes is not None and minor[1] is None and dest in changes.salvaged:
- return PICK_MAJOR
- elif changes is not None and dest in changes.merged:
- return PICK_MAJOR
- elif not isancestor(major_tt, minor_tt):
- if major[1] is not None:
- return PICK_MAJOR
- elif isancestor(minor_tt, major_tt):
- return PICK_MAJOR
- return PICK_MINOR
+ return PICK_EITHER, False
+ elif (
+ changes is not None
+ and minor_value is not None
+ and major_value is None
+ and dest in changes.salvaged
+ ):
+ # In this case, a deletion was reverted, the "alive" value overwrite
+ # the deleted one.
+ return PICK_MINOR, True
+ elif (
+ changes is not None
+ and major_value is not None
+ and minor_value is None
+ and dest in changes.salvaged
+ ):
+ # In this case, a deletion was reverted, the "alive" value overwrite
+ # the deleted one.
+ return PICK_MAJOR, True
+ elif isancestor(minor_tt, major_tt):
+ if changes is not None and dest in changes.merged:
+ # change to dest happened on the branch without copy-source change,
+ # so both source are valid and "major" wins.
+ return PICK_MAJOR, True
+ else:
+ return PICK_MAJOR, False
+ elif isancestor(major_tt, minor_tt):
+ if changes is not None and dest in changes.merged:
+ # change to dest happened on the branch without copy-source change,
+ # so both source are valid and "major" wins.
+ return PICK_MAJOR, True
+ else:
+ return PICK_MINOR, False
+ elif minor_value is None:
+ # in case of conflict, the "alive" side wins.
+ return PICK_MAJOR, True
+ elif major_value is None:
+ # in case of conflict, the "alive" side wins.
+ return PICK_MINOR, True
+ else:
+ # in case of conflict where both side are alive, major wins.
+ return PICK_MAJOR, True
def _revinfo_getter_extra(repo):
@@ -650,22 +704,28 @@
def _backwardrenames(a, b, match):
+ """find renames from a to b"""
if a._repo.ui.config(b'experimental', b'copytrace') == b'off':
return {}
+ # We don't want to pass in "match" here, since that would filter
+ # the destination by it. Since we're reversing the copies, we want
+ # to filter the source instead.
+ copies = _forwardcopies(b, a)
+ return _reverse_renames(copies, a, match)
+
+
+def _reverse_renames(copies, dst, match):
+ """given copies to context 'dst', finds renames from that context"""
# Even though we're not taking copies into account, 1:n rename situations
# can still exist (e.g. hg cp a b; hg mv a c). In those cases we
# arbitrarily pick one of the renames.
- # We don't want to pass in "match" here, since that would filter
- # the destination by it. Since we're reversing the copies, we want
- # to filter the source instead.
- f = _forwardcopies(b, a)
r = {}
- for k, v in sorted(pycompat.iteritems(f)):
+ for k, v in sorted(pycompat.iteritems(copies)):
if match and not match(v):
continue
# remove copies
- if v in a:
+ if v in dst:
continue
r[v] = k
return r
@@ -701,9 +761,17 @@
base = None
if a.rev() != nullrev:
base = x
+ x_copies = _forwardcopies(a, x)
+ y_copies = _forwardcopies(a, y, base, match=match)
+ same_keys = set(x_copies) & set(y_copies)
+ for k in same_keys:
+ if x_copies.get(k) == y_copies.get(k):
+ del x_copies[k]
+ del y_copies[k]
+ x_backward_renames = _reverse_renames(x_copies, x, match)
copies = _chain(
- _backwardrenames(x, a, match=match),
- _forwardcopies(a, y, base, match=match),
+ x_backward_renames,
+ y_copies,
)
_filter(x, y, copies)
return copies
@@ -1042,11 +1110,17 @@
b" discovered dir src: '%s' -> dst: '%s'\n" % (d, dirmove[d])
)
+ # Sort the directories in reverse order, so we find children first
+ # For example, if dir1/ was renamed to dir2/, and dir1/subdir1/
+ # was renamed to dir2/subdir2/, we want to move dir1/subdir1/file
+ # to dir2/subdir2/file (not dir2/subdir1/file)
+ dirmove_children_first = sorted(dirmove, reverse=True)
+
movewithdir = {}
# check unaccounted nonoverlapping files against directory moves
for f in addedfilesfn():
if f not in fullcopy:
- for d in dirmove:
+ for d in dirmove_children_first:
if f.startswith(d):
# new file added in a directory that was moved, move it
df = dirmove[d] + f[len(d) :]
@@ -1220,6 +1294,15 @@
by merge.update().
"""
new_copies = pathcopies(base, ctx)
- _filter(wctx.p1(), wctx, new_copies)
+ parent = wctx.p1()
+ _filter(parent, wctx, new_copies)
+ # Extra filtering to drop copy information for files that existed before
+ # the graft. This is to handle the case of grafting a rename onto a commit
+ # that already has the rename. Otherwise the presence of copy information
+ # would result in the creation of an empty commit where we would prefer to
+ # not create one.
+ for dest, __ in list(new_copies.items()):
+ if dest in parent:
+ del new_copies[dest]
for dst, src in pycompat.iteritems(new_copies):
wctx[dst].markcopied(src)
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/dagop.py
--- a/mercurial/dagop.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/dagop.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# dagop.py - graph ancestry and topology algorithm for revset
#
-# Copyright 2010 Matt Mackall
+# Copyright 2010 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/debugcommands.py
--- a/mercurial/debugcommands.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/debugcommands.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# debugcommands.py - command processing for debug* commands
#
-# Copyright 2005-2016 Matt Mackall
+# Copyright 2005-2016 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -9,6 +9,7 @@
import codecs
import collections
+import contextlib
import difflib
import errno
import glob
@@ -69,6 +70,7 @@
pycompat,
registrar,
repair,
+ repoview,
revlog,
revset,
revsetlang,
@@ -96,6 +98,7 @@
dateutil,
procutil,
stringutil,
+ urlutil,
)
from .revlogutils import (
@@ -345,7 +348,7 @@
def showchunks(named):
ui.write(b"\n%s%s\n" % (indent_string, named))
for deltadata in gen.deltaiter():
- node, p1, p2, cs, deltabase, delta, flags = deltadata
+ node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
ui.write(
b"%s%s %s %s %s %s %d\n"
% (
@@ -371,7 +374,7 @@
raise error.Abort(_(b'use debugbundle2 for this file'))
gen.changelogheader()
for deltadata in gen.deltaiter():
- node, p1, p2, cs, deltabase, delta, flags = deltadata
+ node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
ui.write(b"%s%s\n" % (indent_string, hex(node)))
@@ -470,27 +473,47 @@
"""lists the capabilities of a remote peer"""
opts = pycompat.byteskwargs(opts)
peer = hg.peer(ui, opts, path)
- caps = peer.capabilities()
- ui.writenoi18n(b'Main capabilities:\n')
- for c in sorted(caps):
- ui.write(b' %s\n' % c)
- b2caps = bundle2.bundle2caps(peer)
- if b2caps:
- ui.writenoi18n(b'Bundle2 capabilities:\n')
- for key, values in sorted(pycompat.iteritems(b2caps)):
- ui.write(b' %s\n' % key)
- for v in values:
- ui.write(b' %s\n' % v)
-
-
-@command(b'debugchangedfiles', [], b'REV')
-def debugchangedfiles(ui, repo, rev):
+ try:
+ caps = peer.capabilities()
+ ui.writenoi18n(b'Main capabilities:\n')
+ for c in sorted(caps):
+ ui.write(b' %s\n' % c)
+ b2caps = bundle2.bundle2caps(peer)
+ if b2caps:
+ ui.writenoi18n(b'Bundle2 capabilities:\n')
+ for key, values in sorted(pycompat.iteritems(b2caps)):
+ ui.write(b' %s\n' % key)
+ for v in values:
+ ui.write(b' %s\n' % v)
+ finally:
+ peer.close()
+
+
+@command(
+ b'debugchangedfiles',
+ [
+ (
+ b'',
+ b'compute',
+ False,
+ b"compute information instead of reading it from storage",
+ ),
+ ],
+ b'REV',
+)
+def debugchangedfiles(ui, repo, rev, **opts):
"""list the stored files changes for a revision"""
ctx = scmutil.revsingle(repo, rev, None)
- sd = repo.changelog.sidedata(ctx.rev())
- files_block = sd.get(sidedata.SD_FILES)
- if files_block is not None:
- files = metadata.decode_files_sidedata(sd)
+ files = None
+
+ if opts['compute']:
+ files = metadata.compute_all_files_changes(ctx)
+ else:
+ sd = repo.changelog.sidedata(ctx.rev())
+ files_block = sd.get(sidedata.SD_FILES)
+ if files_block is not None:
+ files = metadata.decode_files_sidedata(sd)
+ if files is not None:
for f in sorted(files.touched):
if f in files.added:
action = b"added"
@@ -964,20 +987,111 @@
),
(b'', b'rev', [], b'restrict discovery to this set of revs'),
(b'', b'seed', b'12323', b'specify the random seed use for discovery'),
+ (
+ b'',
+ b'local-as-revs',
+ b"",
+ b'treat local as having these revisions only',
+ ),
+ (
+ b'',
+ b'remote-as-revs',
+ b"",
+ b'use local as remote, with only these revisions',
+ ),
]
- + cmdutil.remoteopts,
+ + cmdutil.remoteopts
+ + cmdutil.formatteropts,
_(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
- """runs the changeset discovery protocol in isolation"""
+ """runs the changeset discovery protocol in isolation
+
+ The local peer can be "replaced" by a subset of the local repository by
+ using the `--local-as-revs` flag. In the same way, the usual `remote` peer can
+ be "replaced" by a subset of the local repository using the
+ `--remote-as-revs` flag. This is useful to efficiently debug pathological
+ discovery situations.
+
+ The following developer-oriented config options are relevant for people playing with this command:
+
+ * devel.discovery.exchange-heads=True
+
+ If False, the discovery will not start with
+ remote head fetching and local head querying.
+
+ * devel.discovery.grow-sample=True
+
+ If False, the sample size used in set discovery will not be increased
+ through the process
+
+ * devel.discovery.grow-sample.dynamic=True
+
+ When discovery.grow-sample.dynamic is True, the default, the sample size is
+ adapted to the shape of the undecided set (it is set to the max of:
+ sample-size, len(roots(undecided)), len(heads(undecided))).
+
+ * devel.discovery.grow-sample.rate=1.05
+
+ The rate at which the sample grows.
+
+ * devel.discovery.randomize=True
+
+ If False, random samplings during discovery are deterministic. It is
+ meant for integration tests.
+
+ * devel.discovery.sample-size=200
+
+ Control the initial size of the discovery sample
+
+ * devel.discovery.sample-size.initial=100
+
+ Control the initial size of the discovery for initial change
+ """
opts = pycompat.byteskwargs(opts)
- remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
- remote = hg.peer(repo, opts, remoteurl)
- ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
+ unfi = repo.unfiltered()
+
+ # setup potential extra filtering
+ local_revs = opts[b"local_as_revs"]
+ remote_revs = opts[b"remote_as_revs"]
# make sure tests are repeatable
random.seed(int(opts[b'seed']))
+ if not remote_revs:
+
+ remoteurl, branches = urlutil.get_unique_pull_path(
+ b'debugdiscovery', repo, ui, remoteurl
+ )
+ remote = hg.peer(repo, opts, remoteurl)
+ ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
+ else:
+ branches = (None, [])
+ remote_filtered_revs = scmutil.revrange(
+ unfi, [b"not (::(%s))" % remote_revs]
+ )
+ remote_filtered_revs = frozenset(remote_filtered_revs)
+
+ def remote_func(x):
+ return remote_filtered_revs
+
+ repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
+
+ remote = repo.peer()
+ remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
+
+ if local_revs:
+ local_filtered_revs = scmutil.revrange(
+ unfi, [b"not (::(%s))" % local_revs]
+ )
+ local_filtered_revs = frozenset(local_filtered_revs)
+
+ def local_func(x):
+ return local_filtered_revs
+
+ repoview.filtertable[b'debug-discovery-local-filter'] = local_func
+ repo = repo.filtered(b'debug-discovery-local-filter')
+
data = {}
if opts.get(b'old'):
@@ -1014,8 +1128,21 @@
remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
localrevs = opts[b'rev']
- with util.timedcm('debug-discovery') as t:
- common, hds = doit(localrevs, remoterevs)
+
+ fm = ui.formatter(b'debugdiscovery', opts)
+ if fm.strict_format:
+
+ @contextlib.contextmanager
+ def may_capture_output():
+ ui.pushbuffer()
+ yield
+ data[b'output'] = ui.popbuffer()
+
+ else:
+ may_capture_output = util.nullcontextmanager
+ with may_capture_output():
+ with util.timedcm('debug-discovery') as t:
+ common, hds = doit(localrevs, remoterevs)
# compute all statistics
heads_common = set(common)
@@ -1066,50 +1193,41 @@
data[b'nb-ini_und-common'] = len(common_initial_undecided)
data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
+ fm.startitem()
+ fm.data(**pycompat.strkwargs(data))
# display discovery summary
- ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
- ui.writenoi18n(b"round-trips: %(total-roundtrips)9d\n" % data)
- ui.writenoi18n(b"heads summary:\n")
- ui.writenoi18n(b" total common heads: %(nb-common-heads)9d\n" % data)
- ui.writenoi18n(
- b" also local heads: %(nb-common-heads-local)9d\n" % data
- )
- ui.writenoi18n(
- b" also remote heads: %(nb-common-heads-remote)9d\n" % data
- )
- ui.writenoi18n(b" both: %(nb-common-heads-both)9d\n" % data)
- ui.writenoi18n(b" local heads: %(nb-head-local)9d\n" % data)
- ui.writenoi18n(
- b" common: %(nb-common-heads-local)9d\n" % data
- )
- ui.writenoi18n(
- b" missing: %(nb-head-local-missing)9d\n" % data
- )
- ui.writenoi18n(b" remote heads: %(nb-head-remote)9d\n" % data)
- ui.writenoi18n(
- b" common: %(nb-common-heads-remote)9d\n" % data
- )
- ui.writenoi18n(
- b" unknown: %(nb-head-remote-unknown)9d\n" % data
- )
- ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
- ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
- ui.writenoi18n(b" heads: %(nb-common-heads)9d\n" % data)
- ui.writenoi18n(b" roots: %(nb-common-roots)9d\n" % data)
- ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
- ui.writenoi18n(b" heads: %(nb-missing-heads)9d\n" % data)
- ui.writenoi18n(b" roots: %(nb-missing-roots)9d\n" % data)
- ui.writenoi18n(b" first undecided set: %(nb-ini_und)9d\n" % data)
- ui.writenoi18n(b" heads: %(nb-ini_und-heads)9d\n" % data)
- ui.writenoi18n(b" roots: %(nb-ini_und-roots)9d\n" % data)
- ui.writenoi18n(b" common: %(nb-ini_und-common)9d\n" % data)
- ui.writenoi18n(b" missing: %(nb-ini_und-missing)9d\n" % data)
+ fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
+ fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
+ fm.plain(b"heads summary:\n")
+ fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
+ fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
+ fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
+ fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
+ fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
+ fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
+ fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
+ fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
+ fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
+ fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
+ fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
+ fm.plain(b" common: %(nb-revs-common)9d\n" % data)
+ fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
+ fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
+ fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
+ fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
+ fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
+ fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
+ fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
+ fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
+ fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
+ fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
if ui.verbose:
- ui.writenoi18n(
+ fm.plain(
b"common heads: %s\n"
% b" ".join(sorted(short(n) for n in heads_common))
)
+ fm.end()
_chunksize = 4 << 10
@@ -2214,9 +2332,9 @@
b'',
b'dump-new',
False,
- _(b'write a (new) persistent binary nodemap on stdin'),
+ _(b'write a (new) persistent binary nodemap on stdout'),
),
- (b'', b'dump-disk', False, _(b'dump on-disk data on stdin')),
+ (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
(
b'',
b'check',
@@ -2546,12 +2664,17 @@
with ui.configoverride(overrides):
peer = hg.peer(ui, {}, path)
- local = peer.local() is not None
- canpush = peer.canpush()
-
- ui.write(_(b'url: %s\n') % peer.url())
- ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
- ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
+ try:
+ local = peer.local() is not None
+ canpush = peer.canpush()
+
+ ui.write(_(b'url: %s\n') % peer.url())
+ ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
+ ui.write(
+ _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
+ )
+ finally:
+ peer.close()
@command(
@@ -2654,26 +2777,30 @@
"""
target = hg.peer(ui, {}, repopath)
- if keyinfo:
- key, old, new = keyinfo
- with target.commandexecutor() as e:
- r = e.callcommand(
- b'pushkey',
- {
- b'namespace': namespace,
- b'key': key,
- b'old': old,
- b'new': new,
- },
- ).result()
-
- ui.status(pycompat.bytestr(r) + b'\n')
- return not r
- else:
- for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
- ui.write(
- b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
- )
+ try:
+ if keyinfo:
+ key, old, new = keyinfo
+ with target.commandexecutor() as e:
+ r = e.callcommand(
+ b'pushkey',
+ {
+ b'namespace': namespace,
+ b'key': key,
+ b'old': old,
+ b'new': new,
+ },
+ ).result()
+
+ ui.status(pycompat.bytestr(r) + b'\n')
+ return not r
+ else:
+ for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
+ ui.write(
+ b"%s\t%s\n"
+ % (stringutil.escapestr(k), stringutil.escapestr(v))
+ )
+ finally:
+ target.close()
@command(b'debugpvec', [], _(b'A B'))
@@ -3527,8 +3654,10 @@
)
source = b"default"
- source, branches = hg.parseurl(ui.expandpath(source))
- url = util.url(source)
+ source, branches = urlutil.get_unique_pull_path(
+ b'debugssl', repo, ui, source
+ )
+ url = urlutil.url(source)
defaultport = {b'https': 443, b'ssh': 22}
if url.scheme in defaultport:
@@ -3636,8 +3765,14 @@
for backup in backups:
# Much of this is copied from the hg incoming logic
- source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
- source, branches = hg.parseurl(source, opts.get(b"branch"))
+ source = os.path.relpath(backup, encoding.getcwd())
+ source, branches = urlutil.get_unique_pull_path(
+ b'debugbackupbundle',
+ repo,
+ ui,
+ source,
+ default_branches=opts.get(b'branch'),
+ )
try:
other = hg.peer(repo, opts, source)
except error.LookupError as ex:
@@ -3719,6 +3854,23 @@
ui.writenoi18n(b' revision %s\n' % v[1])
+@command(b'debugshell', optionalrepo=True)
+def debugshell(ui, repo):
+ """run an interactive Python interpreter
+
+ The local namespace is provided with a reference to the ui and
+ the repo instance (if available).
+ """
+ import code
+
+ imported_objects = {
+ 'ui': ui,
+ 'repo': repo,
+ }
+
+ code.interact(local=imported_objects)
+
+
@command(
b'debugsuccessorssets',
[(b'', b'closest', False, _(b'return closest successors sets only'))],
@@ -3779,10 +3931,19 @@
def debugtagscache(ui, repo):
"""display the contents of .hg/cache/hgtagsfnodes1"""
cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
+ flog = repo.file(b'.hgtags')
for r in repo:
node = repo[r].node()
tagsnode = cache.getfnode(node, computemissing=False)
- tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
+ if tagsnode:
+ tagsnodedisplay = hex(tagsnode)
+ if not flog.hasnode(tagsnode):
+ tagsnodedisplay += b' (unknown node)'
+ elif tagsnode is None:
+ tagsnodedisplay = b'missing'
+ else:
+ tagsnodedisplay = b'invalid'
+
ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
@@ -4000,19 +4161,22 @@
def debugwireargs(ui, repopath, *vals, **opts):
opts = pycompat.byteskwargs(opts)
repo = hg.peer(ui, opts, repopath)
- for opt in cmdutil.remoteopts:
- del opts[opt[1]]
- args = {}
- for k, v in pycompat.iteritems(opts):
- if v:
- args[k] = v
- args = pycompat.strkwargs(args)
- # run twice to check that we don't mess up the stream for the next command
- res1 = repo.debugwireargs(*vals, **args)
- res2 = repo.debugwireargs(*vals, **args)
- ui.write(b"%s\n" % res1)
- if res1 != res2:
- ui.warn(b"%s\n" % res2)
+ try:
+ for opt in cmdutil.remoteopts:
+ del opts[opt[1]]
+ args = {}
+ for k, v in pycompat.iteritems(opts):
+ if v:
+ args[k] = v
+ args = pycompat.strkwargs(args)
+ # run twice to check that we don't mess up the stream for the next command
+ res1 = repo.debugwireargs(*vals, **args)
+ res2 = repo.debugwireargs(*vals, **args)
+ ui.write(b"%s\n" % res1)
+ if res1 != res2:
+ ui.warn(b"%s\n" % res2)
+ finally:
+ repo.close()
def _parsewirelangblocks(fh):
@@ -4372,7 +4536,7 @@
# We bypass hg.peer() so we can proxy the sockets.
# TODO consider not doing this because we skip
# ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
- u = util.url(path)
+ u = urlutil.url(path)
if u.scheme != b'http':
raise error.Abort(_(b'only http:// paths are currently supported'))
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/destutil.py
--- a/mercurial/destutil.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/destutil.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# destutil.py - Mercurial utility function for command destination
#
-# Copyright Matt Mackall and other
+# Copyright Olivia Mackall and other
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/diffhelper.py
--- a/mercurial/diffhelper.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/diffhelper.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# diffhelper.py - helper routines for patch
#
-# Copyright 2009 Matt Mackall and others
+# Copyright 2009 Olivia Mackall and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/dirstate.py
--- a/mercurial/dirstate.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/dirstate.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# dirstate.py - working directory tracking for mercurial
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -73,13 +73,16 @@
@interfaceutil.implementer(intdirstate.idirstate)
class dirstate(object):
- def __init__(self, opener, ui, root, validate, sparsematchfn):
+ def __init__(
+ self, opener, ui, root, validate, sparsematchfn, nodeconstants
+ ):
"""Create a new dirstate object.
opener is an open()-like callable that can be used to open the
dirstate file; root is the root of the directory tracked by
the dirstate.
"""
+ self._nodeconstants = nodeconstants
self._opener = opener
self._validate = validate
self._root = root
@@ -136,7 +139,9 @@
@propertycache
def _map(self):
"""Return the dirstate contents (see documentation for dirstatemap)."""
- self._map = self._mapcls(self._ui, self._opener, self._root)
+ self._map = self._mapcls(
+ self._ui, self._opener, self._root, self._nodeconstants
+ )
return self._map
@property
@@ -1425,12 +1430,13 @@
denormalized form that they appear as in the dirstate.
"""
- def __init__(self, ui, opener, root):
+ def __init__(self, ui, opener, root, nodeconstants):
self._ui = ui
self._opener = opener
self._root = root
self._filename = b'dirstate'
self._nodelen = 20
+ self._nodeconstants = nodeconstants
self._parents = None
self._dirtyparents = False
@@ -1729,7 +1735,8 @@
if rustmod is not None:
class dirstatemap(object):
- def __init__(self, ui, opener, root):
+ def __init__(self, ui, opener, root, nodeconstants):
+ self._nodeconstants = nodeconstants
self._ui = ui
self._opener = opener
self._root = root
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/dirstateguard.py
--- a/mercurial/dirstateguard.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/dirstateguard.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# dirstateguard.py - class to allow restoring dirstate after failure
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/discovery.py
--- a/mercurial/discovery.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/discovery.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# discovery.py - protocol changeset discovery functions
#
-# Copyright 2010 Matt Mackall
+# Copyright 2010 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -270,9 +270,12 @@
# C. Update newmap with outgoing changes.
# This will possibly add new heads and remove existing ones.
newmap = branchmap.remotebranchcache(
- (branch, heads[1])
- for branch, heads in pycompat.iteritems(headssum)
- if heads[0] is not None
+ repo,
+ (
+ (branch, heads[1])
+ for branch, heads in pycompat.iteritems(headssum)
+ if heads[0] is not None
+ ),
)
newmap.update(repo, (ctx.rev() for ctx in missingctx))
for branch, newheads in pycompat.iteritems(newmap):
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/dispatch.py
--- a/mercurial/dispatch.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/dispatch.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# dispatch.py - command dispatching for mercurial
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -50,6 +50,7 @@
from .utils import (
procutil,
stringutil,
+ urlutil,
)
@@ -990,7 +991,7 @@
lui.readconfig(os.path.join(path, b".hg", b"hgrc-not-shared"), path)
if rpath:
- path = lui.expandpath(rpath)
+ path = urlutil.get_clone_path(lui, rpath)[0]
lui = ui.copy()
if rcutil.use_repo_hgrc():
_readsharedsourceconfig(lui, path)
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/encoding.py
--- a/mercurial/encoding.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/encoding.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# encoding.py - character transcoding support for Mercurial
#
-# Copyright 2005-2009 Matt Mackall and others
+# Copyright 2005-2009 Olivia Mackall and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/error.py
--- a/mercurial/error.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/error.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# error.py - Mercurial exceptions
#
-# Copyright 2005-2008 Matt Mackall
+# Copyright 2005-2008 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -20,7 +20,13 @@
if pycompat.TYPE_CHECKING:
from typing import (
+ Any,
+ AnyStr,
+ Iterable,
+ List,
Optional,
+ Sequence,
+ Union,
)
@@ -60,6 +66,7 @@
class SidedataHashError(RevlogError):
def __init__(self, key, expected, got):
+ self.hint = None
self.sidedatakey = key
self.expecteddigest = expected
self.actualdigest = got
@@ -77,9 +84,9 @@
# Python 2.6+ complain about the 'message' property being deprecated
self.lookupmessage = message
if isinstance(name, bytes) and len(name) == 20:
- from .node import short
+ from .node import hex
- name = short(name)
+ name = hex(name)
# if name is a binary node, it can be None
RevlogError.__init__(
self, b'%s@%s: %s' % (index, pycompat.bytestr(name), message)
@@ -108,6 +115,7 @@
"""Exception raised on errors in parsing the command line."""
def __init__(self, command, message):
+ # type: (bytes, bytes) -> None
self.command = command
self.message = message
super(CommandError, self).__init__()
@@ -119,6 +127,7 @@
"""Exception raised if command is not in the command table."""
def __init__(self, command, all_commands=None):
+ # type: (bytes, Optional[List[bytes]]) -> None
self.command = command
self.all_commands = all_commands
super(UnknownCommand, self).__init__()
@@ -130,6 +139,7 @@
"""Exception raised if command shortcut matches more than one command."""
def __init__(self, prefix, matches):
+ # type: (bytes, List[bytes]) -> None
self.prefix = prefix
self.matches = matches
super(AmbiguousCommand, self).__init__()
@@ -141,6 +151,7 @@
"""Exception raised when a worker process dies."""
def __init__(self, status_code):
+ # type: (int) -> None
self.status_code = status_code
# Pass status code to superclass just so it becomes part of __bytes__
super(WorkerError, self).__init__(status_code)
@@ -158,6 +169,7 @@
"""Exception raised when a continuable command required merge conflict resolution."""
def __init__(self, opname):
+ # type: (bytes) -> None
from .i18n import _
self.opname = opname
@@ -193,6 +205,7 @@
return pycompat.sysstr(self.__bytes__())
def format(self):
+ # type: () -> bytes
from .i18n import _
message = _(b"abort: %s\n") % self.message
@@ -246,10 +259,12 @@
"""Exception raised when parsing config files"""
def __init__(self, message, location=None, hint=None):
+ # type: (bytes, Optional[bytes], Optional[bytes]) -> None
super(ConfigError, self).__init__(message, hint=hint)
self.location = location
def format(self):
+ # type: () -> bytes
from .i18n import _
if self.location is not None:
@@ -289,20 +304,34 @@
Abort.__init__(self, _(b'response expected'))
-class OutOfBandError(Hint, Exception):
+class RemoteError(Abort):
+ """Exception raised when interacting with a remote repo fails"""
+
+
+class OutOfBandError(RemoteError):
"""Exception raised when a remote repo reports failure"""
- __bytes__ = _tobytes
+ def __init__(self, message=None, hint=None):
+ from .i18n import _
+
+ if message:
+ # Abort.format() adds a trailing newline
+ message = _(b"remote error:\n%s") % message.rstrip(b'\n')
+ else:
+ message = _(b"remote error")
+ super(OutOfBandError, self).__init__(message, hint=hint)
class ParseError(Abort):
"""Raised when parsing config files and {rev,file}sets (msg[, pos])"""
def __init__(self, message, location=None, hint=None):
+ # type: (bytes, Optional[Union[bytes, int]], Optional[bytes]) -> None
super(ParseError, self).__init__(message, hint=hint)
self.location = location
def format(self):
+ # type: () -> bytes
from .i18n import _
if self.location is not None:
@@ -322,6 +351,7 @@
def getsimilar(symbols, value):
+ # type: (Iterable[bytes], bytes) -> List[bytes]
sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
# The cutoff for similarity here is pretty arbitrary. It should
# probably be investigated and tweaked.
@@ -329,6 +359,7 @@
def similarity_hint(similar):
+ # type: (List[bytes]) -> Optional[bytes]
from .i18n import _
if len(similar) == 1:
@@ -344,6 +375,7 @@
"""Exception raised when a {rev,file}set references an unknown identifier"""
def __init__(self, function, symbols):
+ # type: (bytes, Iterable[bytes]) -> None
from .i18n import _
similar = getsimilar(symbols, function)
@@ -378,6 +410,7 @@
"""Raised if I/O to stdout or stderr fails"""
def __init__(self, err):
+ # type: (IOError) -> None
IOError.__init__(self, err.errno, err.strerror)
# no __bytes__() because error message is derived from the standard IOError
@@ -385,6 +418,7 @@
class UnsupportedMergeRecords(Abort):
def __init__(self, recordtypes):
+ # type: (Iterable[bytes]) -> None
from .i18n import _
self.recordtypes = sorted(recordtypes)
@@ -403,12 +437,15 @@
"""generic exception for aborting from an encounter with an unknown version"""
def __init__(self, msg, hint=None, version=None):
+ # type: (bytes, Optional[bytes], Optional[bytes]) -> None
self.version = version
super(UnknownVersion, self).__init__(msg, hint=hint)
class LockError(IOError):
def __init__(self, errno, strerror, filename, desc):
+ # TODO: figure out if this should be bytes or str
+ # _type: (int, str, str, bytes) -> None
IOError.__init__(self, errno, strerror, filename)
self.desc = desc
@@ -455,6 +492,7 @@
"""Raised if a mercurial (core or extension) developer made a mistake"""
def __init__(self, msg, *args, **kwargs):
+ # type: (AnyStr, Any, Any) -> None
# On Python 3, turn the message back into a string since this is
# an internal-only error that won't be printed except in a
# stack traces.
@@ -498,7 +536,7 @@
entries.append(b"%s=%r" % (par, pycompat.maybebytestr(val)))
if entries:
msg = b'%s - %s' % (msg, b', '.join(entries))
- ValueError.__init__(self, msg)
+ ValueError.__init__(self, msg) # TODO: convert to str?
class ReadOnlyPartError(RuntimeError):
@@ -532,6 +570,7 @@
"""
def __init__(self, filename, node, tombstone):
+ # type: (bytes, bytes, bytes) -> None
from .node import short
StorageError.__init__(self, b'%s:%s' % (filename, short(node)))
@@ -587,5 +626,6 @@
"""
def __init__(self, message, args=None):
+ # type: (bytes, Optional[Sequence[bytes]]) -> None
self.message = message
self.messageargs = args
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/exchange.py
--- a/mercurial/exchange.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/exchange.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# exchange.py - utility to exchange data between repos.
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -42,6 +42,7 @@
from .utils import (
hashutil,
stringutil,
+ urlutil,
)
urlerr = util.urlerr
@@ -420,7 +421,20 @@
b'unbundle wire protocol command'
)
)
-
+ for category in sorted(bundle2.read_remote_wanted_sidedata(pushop.remote)):
+ # Check that a computer is registered for that category for at least
+ # one revlog kind.
+ for kind, computers in repo._sidedata_computers.items():
+ if computers.get(category):
+ break
+ else:
+ raise error.Abort(
+ _(
+ b'cannot push: required sidedata category not supported'
+ b" by this client: '%s'"
+ )
+ % pycompat.bytestr(category)
+ )
# get lock as we might write phase data
wlock = lock = None
try:
@@ -814,7 +828,7 @@
data = []
for book, old, new in pushop.outbookmarks:
data.append((book, old))
- checkdata = bookmod.binaryencode(data)
+ checkdata = bookmod.binaryencode(pushop.repo, data)
bundler.newpart(b'check:bookmarks', data=checkdata)
@@ -865,8 +879,15 @@
if not cgversions:
raise error.Abort(_(b'no common changegroup version'))
version = max(cgversions)
+
+ remote_sidedata = bundle2.read_remote_wanted_sidedata(pushop.remote)
cgstream = changegroup.makestream(
- pushop.repo, pushop.outgoing, version, b'push'
+ pushop.repo,
+ pushop.outgoing,
+ version,
+ b'push',
+ bundlecaps=b2caps,
+ remote_sidedata=remote_sidedata,
)
cgpart = bundler.newpart(b'changegroup', data=cgstream)
if cgversions:
@@ -1007,7 +1028,7 @@
_abortonsecretctx(pushop, new, book)
data.append((book, new))
allactions.append((book, _bmaction(old, new)))
- checkdata = bookmod.binaryencode(data)
+ checkdata = bookmod.binaryencode(pushop.repo, data)
bundler.newpart(b'bookmarks', data=checkdata)
def handlereply(op):
@@ -1126,19 +1147,19 @@
},
).result()
except error.BundleValueError as exc:
- raise error.Abort(_(b'missing support for %s') % exc)
+ raise error.RemoteError(_(b'missing support for %s') % exc)
try:
trgetter = None
if pushback:
trgetter = pushop.trmanager.transaction
op = bundle2.processbundle(pushop.repo, reply, trgetter)
except error.BundleValueError as exc:
- raise error.Abort(_(b'missing support for %s') % exc)
+ raise error.RemoteError(_(b'missing support for %s') % exc)
except bundle2.AbortFromPart as exc:
- pushop.ui.status(_(b'remote: %s\n') % exc)
+ pushop.ui.error(_(b'remote: %s\n') % exc)
if exc.hint is not None:
- pushop.ui.status(_(b'remote: %s\n') % (b'(%s)' % exc.hint))
- raise error.Abort(_(b'push failed on remote'))
+ pushop.ui.error(_(b'remote: %s\n') % (b'(%s)' % exc.hint))
+ raise error.RemoteError(_(b'push failed on remote'))
except error.PushkeyFailed as exc:
partid = int(exc.partid)
if partid not in pushop.pkfailcb:
@@ -1445,7 +1466,7 @@
def transaction(self):
"""Return an open transaction object, constructing if necessary"""
if not self._tr:
- trname = b'%s\n%s' % (self.source, util.hidepassword(self.url))
+ trname = b'%s\n%s' % (self.source, urlutil.hidepassword(self.url))
self._tr = self.repo.transaction(trname)
self._tr.hookargs[b'source'] = self.source
self._tr.hookargs[b'url'] = self.url
@@ -1607,6 +1628,23 @@
) % (b', '.join(sorted(missing)))
raise error.Abort(msg)
+ for category in repo._wanted_sidedata:
+ # Check that a computer is registered for that category for at least
+ # one revlog kind.
+ for kind, computers in repo._sidedata_computers.items():
+ if computers.get(category):
+ break
+ else:
+ # This should never happen since repos are supposed to be able to
+ # generate the sidedata they require.
+ raise error.ProgrammingError(
+ _(
+ b'sidedata category requested by local side without local'
+ b"support: '%s'"
+ )
+ % pycompat.bytestr(category)
+ )
+
pullop.trmanager = transactionmanager(repo, b'pull', remote.url())
wlock = util.nullcontextmanager()
if not bookmod.bookmarksinstore(repo):
@@ -1820,6 +1858,10 @@
pullop.stepsdone.add(b'obsmarkers')
_pullbundle2extraprepare(pullop, kwargs)
+ remote_sidedata = bundle2.read_remote_wanted_sidedata(pullop.remote)
+ if remote_sidedata:
+ kwargs[b'remote_sidedata'] = remote_sidedata
+
with pullop.remote.commandexecutor() as e:
args = dict(kwargs)
args[b'source'] = b'pull'
@@ -1832,10 +1874,10 @@
op.modes[b'bookmarks'] = b'records'
bundle2.processbundle(pullop.repo, bundle, op=op)
except bundle2.AbortFromPart as exc:
- pullop.repo.ui.status(_(b'remote: abort: %s\n') % exc)
- raise error.Abort(_(b'pull failed on remote'), hint=exc.hint)
+ pullop.repo.ui.error(_(b'remote: abort: %s\n') % exc)
+ raise error.RemoteError(_(b'pull failed on remote'), hint=exc.hint)
except error.BundleValueError as exc:
- raise error.Abort(_(b'missing support for %s') % exc)
+ raise error.RemoteError(_(b'missing support for %s') % exc)
if pullop.fetch:
pullop.cgresult = bundle2.combinechangegroupresults(op)
@@ -2249,7 +2291,13 @@
def getbundlechunks(
- repo, source, heads=None, common=None, bundlecaps=None, **kwargs
+ repo,
+ source,
+ heads=None,
+ common=None,
+ bundlecaps=None,
+ remote_sidedata=None,
+ **kwargs
):
"""Return chunks constituting a bundle's raw data.
@@ -2279,7 +2327,12 @@
return (
info,
changegroup.makestream(
- repo, outgoing, b'01', source, bundlecaps=bundlecaps
+ repo,
+ outgoing,
+ b'01',
+ source,
+ bundlecaps=bundlecaps,
+ remote_sidedata=remote_sidedata,
),
)
@@ -2303,6 +2356,7 @@
source,
bundlecaps=bundlecaps,
b2caps=b2caps,
+ remote_sidedata=remote_sidedata,
**pycompat.strkwargs(kwargs)
)
@@ -2325,6 +2379,7 @@
b2caps=None,
heads=None,
common=None,
+ remote_sidedata=None,
**kwargs
):
"""add a changegroup part to the requested bundle"""
@@ -2355,7 +2410,13 @@
matcher = None
cgstream = changegroup.makestream(
- repo, outgoing, version, source, bundlecaps=bundlecaps, matcher=matcher
+ repo,
+ outgoing,
+ version,
+ source,
+ bundlecaps=bundlecaps,
+ matcher=matcher,
+ remote_sidedata=remote_sidedata,
)
part = bundler.newpart(b'changegroup', data=cgstream)
@@ -2369,6 +2430,8 @@
if b'exp-sidedata-flag' in repo.requirements:
part.addparam(b'exp-sidedata', b'1')
+ sidedata = bundle2.format_remote_wanted_sidedata(repo)
+ part.addparam(b'exp-wanted-sidedata', sidedata)
if (
kwargs.get('narrow', False)
@@ -2393,7 +2456,7 @@
if not b2caps or b'bookmarks' not in b2caps:
raise error.Abort(_(b'no common bookmarks exchange method'))
books = bookmod.listbinbookmarks(repo)
- data = bookmod.binaryencode(books)
+ data = bookmod.binaryencode(repo, books)
if data:
bundler.newpart(b'bookmarks', data=data)
@@ -2585,7 +2648,7 @@
# push can proceed
if not isinstance(cg, bundle2.unbundle20):
# legacy case: bundle1 (changegroup 01)
- txnname = b"\n".join([source, util.hidepassword(url)])
+ txnname = b"\n".join([source, urlutil.hidepassword(url)])
with repo.lock(), repo.transaction(txnname) as tr:
op = bundle2.applybundle(repo, cg, tr, source, url)
r = bundle2.combinechangegroupresults(op)
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/exchangev2.py
--- a/mercurial/exchangev2.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/exchangev2.py Tue Apr 20 11:01:06 2021 -0400
@@ -22,6 +22,7 @@
narrowspec,
phases,
pycompat,
+ requirements as requirementsmod,
setdiscovery,
)
from .interfaces import repository
@@ -183,7 +184,7 @@
# TODO This is super hacky. There needs to be a storage API for this. We
# also need to check for compatibility with the remote.
- if b'revlogv1' not in repo.requirements:
+ if requirementsmod.REVLOGV1_REQUIREMENT not in repo.requirements:
return False
return True
@@ -358,18 +359,20 @@
# Linkrev for changelog is always self.
return len(cl)
- def ondupchangeset(cl, node):
- added.append(node)
+ def ondupchangeset(cl, rev):
+ added.append(cl.node(rev))
- def onchangeset(cl, node):
+ def onchangeset(cl, rev):
progress.increment()
- revision = cl.changelogrevision(node)
- added.append(node)
+ revision = cl.changelogrevision(rev)
+ added.append(cl.node(rev))
# We need to preserve the mapping of changelog revision to node
# so we can set the linkrev accordingly when manifests are added.
- manifestnodes[cl.rev(node)] = revision.manifest
+ manifestnodes[rev] = revision.manifest
+
+ repo.register_changeset(rev, revision)
nodesbyphase = {phase: set() for phase in phases.phasenames.values()}
remotebookmarks = {}
@@ -414,12 +417,15 @@
mdiff.trivialdiffheader(len(data)) + data,
# Flags not yet supported.
0,
+ # Sidedata not yet supported
+ {},
)
cl.addgroup(
iterrevisions(),
linkrev,
weakref.proxy(tr),
+ alwayscache=True,
addrevisioncb=onchangeset,
duplicaterevisioncb=ondupchangeset,
)
@@ -492,6 +498,8 @@
delta,
# Flags not yet supported.
0,
+ # Sidedata not yet supported.
+ {},
)
progress.increment()
@@ -533,8 +541,8 @@
# Chomp off header object.
next(objs)
- def onchangeset(cl, node):
- added.append(node)
+ def onchangeset(cl, rev):
+ added.append(cl.node(rev))
rootmanifest.addgroup(
iterrevisions(objs, progress),
@@ -617,6 +625,8 @@
delta,
# Flags not yet supported.
0,
+ # Sidedata not yet supported.
+ {},
)
progress.increment()
@@ -715,6 +725,8 @@
delta,
# Flags not yet supported.
0,
+ # Sidedata not yet supported.
+ {},
)
progress.increment()
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/extensions.py
--- a/mercurial/extensions.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/extensions.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# extensions.py - extension handling for mercurial
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/exthelper.py
--- a/mercurial/exthelper.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/exthelper.py Tue Apr 20 11:01:06 2021 -0400
@@ -46,13 +46,22 @@
# ext.py
eh = exthelper.exthelper()
- # As needed:
+ # As needed (failure to do this will mean your registration will not
+ # happen):
cmdtable = eh.cmdtable
configtable = eh.configtable
filesetpredicate = eh.filesetpredicate
revsetpredicate = eh.revsetpredicate
templatekeyword = eh.templatekeyword
+ # As needed (failure to do this will mean your eh.wrap*-decorated
+ # functions will not wrap, and/or your eh.*setup-decorated functions
+ # will not execute):
+ uisetup = eh.finaluisetup
+ extsetup = eh.finalextsetup
+ reposetup = eh.finalreposetup
+ uipopulate = eh.finaluipopulate
+
@eh.command(b'mynewcommand',
[(b'r', b'rev', [], _(b'operate on these revisions'))],
_(b'-r REV...'),
@@ -155,7 +164,7 @@
c(ui)
def finalextsetup(self, ui):
- """Method to be used as a the extension extsetup
+ """Method to be used as the extension extsetup
The following operations belong here:
@@ -201,6 +210,9 @@
example::
+ # Required, otherwise your uisetup function(s) will not execute.
+ uisetup = eh.finaluisetup
+
@eh.uisetup
def setupbabar(ui):
print('this is uisetup!')
@@ -213,6 +225,9 @@
example::
+ # Required, otherwise your uipopulate function(s) will not execute.
+ uipopulate = eh.finaluipopulate
+
@eh.uipopulate
def setupfoo(ui):
print('this is uipopulate!')
@@ -225,6 +240,9 @@
example::
+ # Required, otherwise your extsetup function(s) will not execute.
+ extsetup = eh.finalextsetup
+
@eh.extsetup
def setupcelestine(ui):
print('this is extsetup!')
@@ -237,6 +255,9 @@
example::
+ # Required, otherwise your reposetup function(s) will not execute.
+ reposetup = eh.finalreposetup
+
@eh.reposetup
def setupzephir(ui, repo):
print('this is reposetup!')
@@ -258,6 +279,11 @@
example::
+ # Required if `extension` is not provided
+ uisetup = eh.finaluisetup
+ # Required if `extension` is provided
+ extsetup = eh.finalextsetup
+
@eh.wrapcommand(b'summary')
def wrapsummary(orig, ui, repo, *args, **kwargs):
ui.note(b'Barry!')
@@ -298,8 +324,11 @@
example::
- @eh.function(discovery, b'checkheads')
- def wrapfunction(orig, *args, **kwargs):
+ # Required, otherwise the function will not be wrapped
+ uisetup = eh.finaluisetup
+
+ @eh.wrapfunction(discovery, b'checkheads')
+ def wrapcheckheads(orig, *args, **kwargs):
ui.note(b'His head smashed in and his heart cut out')
return orig(*args, **kwargs)
"""
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/fancyopts.py
--- a/mercurial/fancyopts.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/fancyopts.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# fancyopts.py - better command line parsing
#
-# Copyright 2005-2009 Matt Mackall and others
+# Copyright 2005-2009 Olivia Mackall and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/filelog.py
--- a/mercurial/filelog.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/filelog.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# filelog.py - file history class for mercurial
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -32,6 +32,8 @@
# Full name of the user visible file, relative to the repository root.
# Used by LFS.
self._revlog.filename = path
+ self._revlog.revlog_kind = b'filelog'
+ self.nullid = self._revlog.nullid
def __len__(self):
return len(self._revlog)
@@ -102,6 +104,7 @@
revisiondata=False,
assumehaveparentrevisions=False,
deltamode=repository.CG_DELTAMODE_STD,
+ sidedata_helpers=None,
):
return self._revlog.emitrevisions(
nodes,
@@ -109,6 +112,7 @@
revisiondata=revisiondata,
assumehaveparentrevisions=assumehaveparentrevisions,
deltamode=deltamode,
+ sidedata_helpers=sidedata_helpers,
)
def addrevision(
@@ -176,7 +180,8 @@
def add(self, text, meta, transaction, link, p1=None, p2=None):
if meta or text.startswith(b'\1\n'):
text = storageutil.packmeta(meta, text)
- return self.addrevision(text, transaction, link, p1, p2)
+ rev = self.addrevision(text, transaction, link, p1, p2)
+ return self.node(rev)
def renamed(self, node):
return storageutil.filerevisioncopied(self, node)
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/filemerge.py
--- a/mercurial/filemerge.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/filemerge.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# filemerge.py - file-level merge handling for Mercurial
#
-# Copyright 2006, 2007, 2008 Matt Mackall
+# Copyright 2006, 2007, 2008 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -538,6 +538,25 @@
@internaltool(
+ b'merge3-lie-about-conflicts',
+ fullmerge,
+ b'',
+ precheck=_mergecheck,
+)
+def _imerge3alwaysgood(*args, **kwargs):
+ # Like merge3, but record conflicts as resolved with markers in place.
+ #
+ # This is used for `diff.merge` to show the differences between
+ # the auto-merge state and the committed merge state. It may be
+ # useful for other things.
+ b1, junk, b2 = _imerge3(*args, **kwargs)
+ # TODO is this right? I'm not sure what these return values mean,
+ # but as far as I can tell this will indicate to callers tha the
+ # merge succeeded.
+ return b1, False, b2
+
+
+@internaltool(
b'mergediff',
fullmerge,
_(
@@ -1195,7 +1214,11 @@
def hasconflictmarkers(data):
return bool(
- re.search(b"^(<<<<<<< .*|=======|>>>>>>> .*)$", data, re.MULTILINE)
+ re.search(
+ br"^(<<<<<<<.*|=======.*|------- .*|\+\+\+\+\+\+\+ .*|>>>>>>>.*)$",
+ data,
+ re.MULTILINE,
+ )
)
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/fileset.py
--- a/mercurial/fileset.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/fileset.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# fileset.py - file set queries for mercurial
#
-# Copyright 2010 Matt Mackall
+# Copyright 2010 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/filesetlang.py
--- a/mercurial/filesetlang.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/filesetlang.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# filesetlang.py - parser, tokenizer and utility for file set language
#
-# Copyright 2010 Matt Mackall
+# Copyright 2010 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/formatter.py
--- a/mercurial/formatter.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/formatter.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# formatter.py - generic output formatting for mercurial
#
-# Copyright 2012 Matt Mackall
+# Copyright 2012 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -178,6 +178,11 @@
class baseformatter(object):
+
+ # set to True if the formater output a strict format that does not support
+ # arbitrary output in the stream.
+ strict_format = False
+
def __init__(self, ui, topic, opts, converter):
self._ui = ui
self._topic = topic
@@ -418,6 +423,9 @@
class jsonformatter(baseformatter):
+
+ strict_format = True
+
def __init__(self, ui, out, topic, opts):
baseformatter.__init__(self, ui, topic, opts, _nullconverter)
self._out = out
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/grep.py
--- a/mercurial/grep.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/grep.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# grep.py - logic for history walk and grep
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/hbisect.py
--- a/mercurial/hbisect.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/hbisect.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# changelog bisection for mercurial
#
-# Copyright 2007 Matt Mackall
+# Copyright 2007 Olivia Mackall
# Copyright 2005, 2006 Benoit Boissinot
#
# Inspired by git bisect, extension skeleton taken from mq.py.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/help.py
--- a/mercurial/help.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/help.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# help.py - help data for mercurial
#
-# Copyright 2006 Matt Mackall
+# Copyright 2006 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -829,10 +829,11 @@
def appendcmds(cmds):
cmds = sorted(cmds)
for c in cmds:
+ display_cmd = c
if ui.verbose:
- rst.append(b" :%s: %s\n" % (b', '.join(syns[c]), h[c]))
- else:
- rst.append(b' :%s: %s\n' % (c, h[c]))
+ display_cmd = b', '.join(syns[c])
+ display_cmd = display_cmd.replace(b':', br'\:')
+ rst.append(b' :%s: %s\n' % (display_cmd, h[c]))
if name in (b'shortlist', b'debug'):
# List without categories.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/helptext/config.txt
--- a/mercurial/helptext/config.txt Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/helptext/config.txt Tue Apr 20 11:01:06 2021 -0400
@@ -910,7 +910,8 @@
Repository with this on-disk format require Mercurial version 5.4 or above.
- Disabled by default.
+ By default this format variant is disabled if fast implementation is not
+ available and enabled by default if the fast implementation is available.
``use-share-safe``
Enforce "safe" behaviors for all "shares" that access this repository.
@@ -966,7 +967,7 @@
On some systems, the Mercurial installation may lack `zstd` support.
- Default is `zlib`.
+ Default is `zstd` if available, `zlib` otherwise.
``bookmarks-in-store``
Store bookmarks in .hg/store/. This means that bookmarks are shared when
@@ -1150,7 +1151,7 @@
``pretxnopen``
Run before any new repository transaction is open. The reason for the
transaction will be in ``$HG_TXNNAME``, and a unique identifier for the
- transaction will be in ``HG_TXNID``. A non-zero status will prevent the
+ transaction will be in ``$HG_TXNID``. A non-zero status will prevent the
transaction from being opened.
``pretxnclose``
@@ -1159,12 +1160,13 @@
content or change it. Exit status 0 allows the commit to proceed. A non-zero
status will cause the transaction to be rolled back. The reason for the
transaction opening will be in ``$HG_TXNNAME``, and a unique identifier for
- the transaction will be in ``HG_TXNID``. The rest of the available data will
- vary according the transaction type. New changesets will add ``$HG_NODE``
- (the ID of the first added changeset), ``$HG_NODE_LAST`` (the ID of the last
- added changeset), ``$HG_URL`` and ``$HG_SOURCE`` variables. Bookmark and
- phase changes will set ``HG_BOOKMARK_MOVED`` and ``HG_PHASES_MOVED`` to ``1``
- respectively, etc.
+ the transaction will be in ``$HG_TXNID``. The rest of the available data will
+ vary according the transaction type. Changes unbundled to the repository will
+ add ``$HG_URL`` and ``$HG_SOURCE``. New changesets will add ``$HG_NODE`` (the
+ ID of the first added changeset), ``$HG_NODE_LAST`` (the ID of the last added
+ changeset). Bookmark and phase changes will set ``$HG_BOOKMARK_MOVED`` and
+ ``$HG_PHASES_MOVED`` to ``1`` respectively. The number of new obsmarkers, if
+ any, will be in ``$HG_NEW_OBSMARKERS``, etc.
``pretxnclose-bookmark``
Run right before a bookmark change is actually finalized. Any repository
@@ -1178,7 +1180,7 @@
will be empty.
In addition, the reason for the transaction opening will be in
``$HG_TXNNAME``, and a unique identifier for the transaction will be in
- ``HG_TXNID``.
+ ``$HG_TXNID``.
``pretxnclose-phase``
Run right before a phase change is actually finalized. Any repository change
@@ -1190,7 +1192,7 @@
while the previous ``$HG_OLDPHASE``. In case of new node, ``$HG_OLDPHASE``
will be empty. In addition, the reason for the transaction opening will be in
``$HG_TXNNAME``, and a unique identifier for the transaction will be in
- ``HG_TXNID``. The hook is also run for newly added revisions. In this case
+ ``$HG_TXNID``. The hook is also run for newly added revisions. In this case
the ``$HG_OLDPHASE`` entry will be empty.
``txnclose``
@@ -1701,7 +1703,8 @@
These symbolic names can be used from the command line. To pull
from ``my_server``: :hg:`pull my_server`. To push to ``local_path``:
-:hg:`push local_path`.
+:hg:`push local_path`. You can check :hg:`help urls` for details about
+valid URLs.
Options containing colons (``:``) denote sub-options that can influence
behavior for that specific path. Example::
@@ -1710,6 +1713,9 @@
my_server = https://example.com/my_path
my_server:pushurl = ssh://example.com/my_path
+Paths using the `path://otherpath` scheme will inherit the sub-options value from
+the path they point to.
+
The following sub-options can be defined:
``pushurl``
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/helptext/hg-ssh.8.txt
--- a/mercurial/helptext/hg-ssh.8.txt Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/helptext/hg-ssh.8.txt Tue Apr 20 11:01:06 2021 -0400
@@ -52,7 +52,7 @@
Author
""""""
-Written by Matt Mackall
+Written by Olivia Mackall
Resources
"""""""""
@@ -64,7 +64,7 @@
Copying
"""""""
-Copyright (C) 2005-2016 Matt Mackall.
+Copyright (C) 2005-2016 Olivia Mackall.
Free use of this software is granted under the terms of the GNU General
Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/helptext/hg.1.txt
--- a/mercurial/helptext/hg.1.txt Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/helptext/hg.1.txt Tue Apr 20 11:01:06 2021 -0400
@@ -6,7 +6,7 @@
Mercurial source code management system
---------------------------------------
-:Author: Matt Mackall
+:Author: Olivia Mackall
:Organization: Mercurial
:Manual section: 1
:Manual group: Mercurial Manual
@@ -100,7 +100,7 @@
Author
""""""
-Written by Matt Mackall
+Written by Olivia Mackall
Resources
"""""""""
@@ -112,7 +112,7 @@
Copying
"""""""
-Copyright (C) 2005-2021 Matt Mackall.
+Copyright (C) 2005-2021 Olivia Mackall.
Free use of this software is granted under the terms of the GNU General
Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/helptext/hgignore.5.txt
--- a/mercurial/helptext/hgignore.5.txt Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/helptext/hgignore.5.txt Tue Apr 20 11:01:06 2021 -0400
@@ -17,7 +17,7 @@
======
Vadim Gelfer
-Mercurial was written by Matt Mackall .
+Mercurial was written by Olivia Mackall .
See Also
========
@@ -26,7 +26,7 @@
Copying
=======
This manual page is copyright 2006 Vadim Gelfer.
-Mercurial is copyright 2005-2021 Matt Mackall.
+Mercurial is copyright 2005-2021 Olivia Mackall.
Free use of this software is granted under the terms of the GNU General
Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/helptext/hgrc.5.txt
--- a/mercurial/helptext/hgrc.5.txt Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/helptext/hgrc.5.txt Tue Apr 20 11:01:06 2021 -0400
@@ -25,7 +25,7 @@
======
Bryan O'Sullivan .
-Mercurial was written by Matt Mackall .
+Mercurial was written by Olivia Mackall .
See Also
========
@@ -34,7 +34,7 @@
Copying
=======
This manual page is copyright 2005 Bryan O'Sullivan.
-Mercurial is copyright 2005-2021 Matt Mackall.
+Mercurial is copyright 2005-2021 Olivia Mackall.
Free use of this software is granted under the terms of the GNU General
Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/helptext/urls.txt
--- a/mercurial/helptext/urls.txt Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/helptext/urls.txt Tue Apr 20 11:01:06 2021 -0400
@@ -5,6 +5,7 @@
http://[user[:pass]@]host[:port]/[path][#revision]
https://[user[:pass]@]host[:port]/[path][#revision]
ssh://[user@]host[:port]/[path][#revision]
+ path://pathname
Paths in the local filesystem can either point to Mercurial
repositories or to bundle files (as created by :hg:`bundle` or
@@ -64,3 +65,12 @@
default-push:
The push command will look for a path named 'default-push', and
prefer it over 'default' if both are defined.
+
+These alias can also be use in the `path://` scheme::
+
+ [paths]
+ alias1 = URL1
+ alias2 = path://alias1
+ ...
+
+check :hg:`help config.paths` for details about the behavior of such "sub-path".
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/hg.py
--- a/mercurial/hg.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/hg.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# hg.py - repository classes for mercurial
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
# Copyright 2006 Vadim Gelfer
#
# This software may be used and distributed according to the terms of the
@@ -32,6 +32,7 @@
error,
exchange,
extensions,
+ graphmod,
httppeer,
localrepo,
lock,
@@ -55,6 +56,7 @@
from .utils import (
hashutil,
stringutil,
+ urlutil,
)
@@ -65,7 +67,7 @@
def _local(path):
- path = util.expandpath(util.urllocalpath(path))
+ path = util.expandpath(urlutil.urllocalpath(path))
try:
# we use os.stat() directly here instead of os.path.isfile()
@@ -131,13 +133,9 @@
def parseurl(path, branches=None):
'''parse url#branch, returning (url, (branch, branches))'''
-
- u = util.url(path)
- branch = None
- if u.fragment:
- branch = u.fragment
- u.fragment = None
- return bytes(u), (branch, branches or [])
+ msg = b'parseurl(...) moved to mercurial.utils.urlutil'
+ util.nouideprecwarn(msg, b'6.0', stacklevel=2)
+ return urlutil.parseurl(path, branches=branches)
schemes = {
@@ -152,7 +150,7 @@
def _peerlookup(path):
- u = util.url(path)
+ u = urlutil.url(path)
scheme = u.scheme or b'file'
thing = schemes.get(scheme) or schemes[b'file']
try:
@@ -177,7 +175,7 @@
def openpath(ui, path, sendaccept=True):
'''open path with open if local, url.open if remote'''
- pathurl = util.url(path, parsequery=False, parsefragment=False)
+ pathurl = urlutil.url(path, parsequery=False, parsefragment=False)
if pathurl.islocal():
return util.posixfile(pathurl.localpath(), b'rb')
else:
@@ -265,7 +263,7 @@
>>> defaultdest(b'http://example.org/foo/')
'foo'
"""
- path = util.url(source).path
+ path = urlutil.url(source).path
if not path:
return b''
return os.path.basename(os.path.normpath(path))
@@ -284,7 +282,7 @@
# the sharedpath always ends in the .hg; we want the path to the repo
source = repo.vfs.split(repo.sharedpath)[0]
- srcurl, branches = parseurl(source)
+ srcurl, branches = urlutil.parseurl(source)
srcrepo = repository(repo.ui, srcurl)
repo.srcrepo = srcrepo
return srcrepo
@@ -307,11 +305,10 @@
if not dest:
dest = defaultdest(source)
else:
- dest = ui.expandpath(dest)
+ dest = urlutil.get_clone_path(ui, dest)[1]
if isinstance(source, bytes):
- origsource = ui.expandpath(source)
- source, branches = parseurl(origsource)
+ origsource, source, branches = urlutil.get_clone_path(ui, source)
srcrepo = repository(ui, source)
rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
else:
@@ -571,7 +568,7 @@
# Resolve the value to put in [paths] section for the source.
if islocal(source):
- defaultpath = os.path.abspath(util.urllocalpath(source))
+ defaultpath = os.path.abspath(urlutil.urllocalpath(source))
else:
defaultpath = source
@@ -674,150 +671,158 @@
"""
if isinstance(source, bytes):
- origsource = ui.expandpath(source)
- source, branches = parseurl(origsource, branch)
+ src = urlutil.get_clone_path(ui, source, branch)
+ origsource, source, branches = src
srcpeer = peer(ui, peeropts, source)
else:
srcpeer = source.peer() # in case we were called with a localrepo
branches = (None, branch or [])
origsource = source = srcpeer.url()
- revs, checkout = addbranchrevs(srcpeer, srcpeer, branches, revs)
+ srclock = destlock = cleandir = None
+ destpeer = None
+ try:
+ revs, checkout = addbranchrevs(srcpeer, srcpeer, branches, revs)
- if dest is None:
- dest = defaultdest(source)
- if dest:
- ui.status(_(b"destination directory: %s\n") % dest)
- else:
- dest = ui.expandpath(dest)
+ if dest is None:
+ dest = defaultdest(source)
+ if dest:
+ ui.status(_(b"destination directory: %s\n") % dest)
+ else:
+ dest = urlutil.get_clone_path(ui, dest)[0]
- dest = util.urllocalpath(dest)
- source = util.urllocalpath(source)
+ dest = urlutil.urllocalpath(dest)
+ source = urlutil.urllocalpath(source)
- if not dest:
- raise error.InputError(_(b"empty destination path is not valid"))
+ if not dest:
+ raise error.InputError(_(b"empty destination path is not valid"))
- destvfs = vfsmod.vfs(dest, expandpath=True)
- if destvfs.lexists():
- if not destvfs.isdir():
- raise error.InputError(_(b"destination '%s' already exists") % dest)
- elif destvfs.listdir():
- raise error.InputError(_(b"destination '%s' is not empty") % dest)
+ destvfs = vfsmod.vfs(dest, expandpath=True)
+ if destvfs.lexists():
+ if not destvfs.isdir():
+ raise error.InputError(
+ _(b"destination '%s' already exists") % dest
+ )
+ elif destvfs.listdir():
+ raise error.InputError(
+ _(b"destination '%s' is not empty") % dest
+ )
- createopts = {}
- narrow = False
-
- if storeincludepats is not None:
- narrowspec.validatepatterns(storeincludepats)
- narrow = True
+ createopts = {}
+ narrow = False
- if storeexcludepats is not None:
- narrowspec.validatepatterns(storeexcludepats)
- narrow = True
+ if storeincludepats is not None:
+ narrowspec.validatepatterns(storeincludepats)
+ narrow = True
+
+ if storeexcludepats is not None:
+ narrowspec.validatepatterns(storeexcludepats)
+ narrow = True
- if narrow:
- # Include everything by default if only exclusion patterns defined.
- if storeexcludepats and not storeincludepats:
- storeincludepats = {b'path:.'}
+ if narrow:
+ # Include everything by default if only exclusion patterns defined.
+ if storeexcludepats and not storeincludepats:
+ storeincludepats = {b'path:.'}
- createopts[b'narrowfiles'] = True
+ createopts[b'narrowfiles'] = True
- if depth:
- createopts[b'shallowfilestore'] = True
+ if depth:
+ createopts[b'shallowfilestore'] = True
- if srcpeer.capable(b'lfs-serve'):
- # Repository creation honors the config if it disabled the extension, so
- # we can't just announce that lfs will be enabled. This check avoids
- # saying that lfs will be enabled, and then saying it's an unknown
- # feature. The lfs creation option is set in either case so that a
- # requirement is added. If the extension is explicitly disabled but the
- # requirement is set, the clone aborts early, before transferring any
- # data.
- createopts[b'lfs'] = True
+ if srcpeer.capable(b'lfs-serve'):
+ # Repository creation honors the config if it disabled the extension, so
+ # we can't just announce that lfs will be enabled. This check avoids
+ # saying that lfs will be enabled, and then saying it's an unknown
+ # feature. The lfs creation option is set in either case so that a
+ # requirement is added. If the extension is explicitly disabled but the
+ # requirement is set, the clone aborts early, before transferring any
+ # data.
+ createopts[b'lfs'] = True
- if extensions.disabled_help(b'lfs'):
- ui.status(
- _(
- b'(remote is using large file support (lfs), but it is '
- b'explicitly disabled in the local configuration)\n'
+ if extensions.disabled_help(b'lfs'):
+ ui.status(
+ _(
+ b'(remote is using large file support (lfs), but it is '
+ b'explicitly disabled in the local configuration)\n'
+ )
)
- )
- else:
- ui.status(
- _(
- b'(remote is using large file support (lfs); lfs will '
- b'be enabled for this repository)\n'
+ else:
+ ui.status(
+ _(
+ b'(remote is using large file support (lfs); lfs will '
+ b'be enabled for this repository)\n'
+ )
)
- )
- shareopts = shareopts or {}
- sharepool = shareopts.get(b'pool')
- sharenamemode = shareopts.get(b'mode')
- if sharepool and islocal(dest):
- sharepath = None
- if sharenamemode == b'identity':
- # Resolve the name from the initial changeset in the remote
- # repository. This returns nullid when the remote is empty. It
- # raises RepoLookupError if revision 0 is filtered or otherwise
- # not available. If we fail to resolve, sharing is not enabled.
- try:
- with srcpeer.commandexecutor() as e:
- rootnode = e.callcommand(
- b'lookup',
- {
- b'key': b'0',
- },
- ).result()
+ shareopts = shareopts or {}
+ sharepool = shareopts.get(b'pool')
+ sharenamemode = shareopts.get(b'mode')
+ if sharepool and islocal(dest):
+ sharepath = None
+ if sharenamemode == b'identity':
+ # Resolve the name from the initial changeset in the remote
+ # repository. This returns nullid when the remote is empty. It
+ # raises RepoLookupError if revision 0 is filtered or otherwise
+ # not available. If we fail to resolve, sharing is not enabled.
+ try:
+ with srcpeer.commandexecutor() as e:
+ rootnode = e.callcommand(
+ b'lookup',
+ {
+ b'key': b'0',
+ },
+ ).result()
- if rootnode != nullid:
- sharepath = os.path.join(sharepool, hex(rootnode))
- else:
+ if rootnode != nullid:
+ sharepath = os.path.join(sharepool, hex(rootnode))
+ else:
+ ui.status(
+ _(
+ b'(not using pooled storage: '
+ b'remote appears to be empty)\n'
+ )
+ )
+ except error.RepoLookupError:
ui.status(
_(
b'(not using pooled storage: '
- b'remote appears to be empty)\n'
+ b'unable to resolve identity of remote)\n'
)
)
- except error.RepoLookupError:
- ui.status(
- _(
- b'(not using pooled storage: '
- b'unable to resolve identity of remote)\n'
- )
+ elif sharenamemode == b'remote':
+ sharepath = os.path.join(
+ sharepool, hex(hashutil.sha1(source).digest())
+ )
+ else:
+ raise error.Abort(
+ _(b'unknown share naming mode: %s') % sharenamemode
)
- elif sharenamemode == b'remote':
- sharepath = os.path.join(
- sharepool, hex(hashutil.sha1(source).digest())
- )
- else:
- raise error.Abort(
- _(b'unknown share naming mode: %s') % sharenamemode
- )
+
+ # TODO this is a somewhat arbitrary restriction.
+ if narrow:
+ ui.status(
+ _(b'(pooled storage not supported for narrow clones)\n')
+ )
+ sharepath = None
- # TODO this is a somewhat arbitrary restriction.
- if narrow:
- ui.status(_(b'(pooled storage not supported for narrow clones)\n'))
- sharepath = None
+ if sharepath:
+ return clonewithshare(
+ ui,
+ peeropts,
+ sharepath,
+ source,
+ srcpeer,
+ dest,
+ pull=pull,
+ rev=revs,
+ update=update,
+ stream=stream,
+ )
- if sharepath:
- return clonewithshare(
- ui,
- peeropts,
- sharepath,
- source,
- srcpeer,
- dest,
- pull=pull,
- rev=revs,
- update=update,
- stream=stream,
- )
+ srcrepo = srcpeer.local()
- srclock = destlock = cleandir = None
- srcrepo = srcpeer.local()
- try:
abspath = origsource
if islocal(origsource):
- abspath = os.path.abspath(util.urllocalpath(origsource))
+ abspath = os.path.abspath(urlutil.urllocalpath(origsource))
if islocal(dest):
cleandir = dest
@@ -931,7 +936,7 @@
local.setnarrowpats(storeincludepats, storeexcludepats)
narrowspec.copytoworkingcopy(local)
- u = util.url(abspath)
+ u = urlutil.url(abspath)
defaulturl = bytes(u)
local.ui.setconfig(b'paths', b'default', defaulturl, b'clone')
if not stream:
@@ -978,7 +983,7 @@
destrepo = destpeer.local()
if destrepo:
template = uimod.samplehgrcs[b'cloned']
- u = util.url(abspath)
+ u = urlutil.url(abspath)
u.passwd = None
defaulturl = bytes(u)
destrepo.vfs.write(b'hgrc', util.tonativeeol(template % defaulturl))
@@ -1055,6 +1060,8 @@
shutil.rmtree(cleandir, True)
if srcpeer is not None:
srcpeer.close()
+ if destpeer and destpeer.local() is None:
+ destpeer.close()
return srcpeer, destpeer
@@ -1114,6 +1121,7 @@
assert stats.unresolvedcount == 0
if show_stats:
_showstats(repo, stats, quietempty)
+ return False
# naming conflict in updatetotally()
@@ -1246,7 +1254,14 @@
def _incoming(
- displaychlist, subreporecurse, ui, repo, source, opts, buffered=False
+ displaychlist,
+ subreporecurse,
+ ui,
+ repo,
+ source,
+ opts,
+ buffered=False,
+ subpath=None,
):
"""
Helper for incoming / gincoming.
@@ -1254,17 +1269,33 @@
(remoterepo, incomingchangesetlist, displayer) parameters,
and is supposed to contain only code that can't be unified.
"""
- source, branches = parseurl(ui.expandpath(source), opts.get(b'branch'))
+ srcs = urlutil.get_pull_paths(repo, ui, [source], opts.get(b'branch'))
+ srcs = list(srcs)
+ if len(srcs) != 1:
+ msg = _('for now, incoming supports only a single source, %d provided')
+ msg %= len(srcs)
+ raise error.Abort(msg)
+ source, branches = srcs[0]
+ if subpath is not None:
+ subpath = urlutil.url(subpath)
+ if subpath.isabs():
+ source = bytes(subpath)
+ else:
+ p = urlutil.url(source)
+ p.path = os.path.normpath(b'%s/%s' % (p.path, subpath))
+ source = bytes(p)
other = peer(repo, opts, source)
- ui.status(_(b'comparing with %s\n') % util.hidepassword(source))
- revs, checkout = addbranchrevs(repo, other, branches, opts.get(b'rev'))
+ cleanupfn = other.close
+ try:
+ ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(source))
+ revs, checkout = addbranchrevs(repo, other, branches, opts.get(b'rev'))
- if revs:
- revs = [other.lookup(rev) for rev in revs]
- other, chlist, cleanupfn = bundlerepo.getremotechanges(
- ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
- )
- try:
+ if revs:
+ revs = [other.lookup(rev) for rev in revs]
+ other, chlist, cleanupfn = bundlerepo.getremotechanges(
+ ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
+ )
+
if not chlist:
ui.status(_(b"no changes found\n"))
return subreporecurse()
@@ -1280,7 +1311,7 @@
return 0 # exit code is zero since we found incoming changes
-def incoming(ui, repo, source, opts):
+def incoming(ui, repo, source, opts, subpath=None):
def subreporecurse():
ret = 1
if opts.get(b'subrepos'):
@@ -1304,67 +1335,115 @@
count += 1
displayer.show(other[n])
- return _incoming(display, subreporecurse, ui, repo, source, opts)
+ return _incoming(
+ display, subreporecurse, ui, repo, source, opts, subpath=subpath
+ )
-def _outgoing(ui, repo, dest, opts):
- path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
- if not path:
- raise error.Abort(
- _(b'default repository not configured!'),
- hint=_(b"see 'hg help config.paths'"),
- )
- dest = path.pushloc or path.loc
- branches = path.branch, opts.get(b'branch') or []
+def _outgoing(ui, repo, dests, opts, subpath=None):
+ out = set()
+ others = []
+ for path in urlutil.get_push_paths(repo, ui, dests):
+ dest = path.pushloc or path.loc
+ if subpath is not None:
+ subpath = urlutil.url(subpath)
+ if subpath.isabs():
+ dest = bytes(subpath)
+ else:
+ p = urlutil.url(dest)
+ p.path = os.path.normpath(b'%s/%s' % (p.path, subpath))
+ dest = bytes(p)
+ branches = path.branch, opts.get(b'branch') or []
+
+ ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(dest))
+ revs, checkout = addbranchrevs(repo, repo, branches, opts.get(b'rev'))
+ if revs:
+ revs = [repo[rev].node() for rev in scmutil.revrange(repo, revs)]
- ui.status(_(b'comparing with %s\n') % util.hidepassword(dest))
- revs, checkout = addbranchrevs(repo, repo, branches, opts.get(b'rev'))
- if revs:
- revs = [repo[rev].node() for rev in scmutil.revrange(repo, revs)]
+ other = peer(repo, opts, dest)
+ try:
+ outgoing = discovery.findcommonoutgoing(
+ repo, other, revs, force=opts.get(b'force')
+ )
+ o = outgoing.missing
+ out.update(o)
+ if not o:
+ scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
+ others.append(other)
+ except: # re-raises
+ other.close()
+ raise
+ # make sure this is ordered by revision number
+ outgoing_revs = list(out)
+ cl = repo.changelog
+ outgoing_revs.sort(key=cl.rev)
+ return outgoing_revs, others
- other = peer(repo, opts, dest)
- outgoing = discovery.findcommonoutgoing(
- repo, other, revs, force=opts.get(b'force')
- )
- o = outgoing.missing
- if not o:
- scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
- return o, other
+
+def _outgoing_recurse(ui, repo, dests, opts):
+ ret = 1
+ if opts.get(b'subrepos'):
+ ctx = repo[None]
+ for subpath in sorted(ctx.substate):
+ sub = ctx.sub(subpath)
+ ret = min(ret, sub.outgoing(ui, dests, opts))
+ return ret
-def outgoing(ui, repo, dest, opts):
- def recurse():
- ret = 1
- if opts.get(b'subrepos'):
- ctx = repo[None]
- for subpath in sorted(ctx.substate):
- sub = ctx.sub(subpath)
- ret = min(ret, sub.outgoing(ui, dest, opts))
- return ret
-
+def _outgoing_filter(repo, revs, opts):
+ """apply revision filtering/ordering option for outgoing"""
limit = logcmdutil.getlimit(opts)
- o, other = _outgoing(ui, repo, dest, opts)
- if not o:
- cmdutil.outgoinghooks(ui, repo, other, opts, o)
- return recurse()
-
+ no_merges = opts.get(b'no_merges')
if opts.get(b'newest_first'):
- o.reverse()
- ui.pager(b'outgoing')
- displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
+ revs.reverse()
+ if limit is None and not no_merges:
+ for r in revs:
+ yield r
+ return
+
count = 0
- for n in o:
+ cl = repo.changelog
+ for n in revs:
if limit is not None and count >= limit:
break
- parents = [p for p in repo.changelog.parents(n) if p != nullid]
- if opts.get(b'no_merges') and len(parents) == 2:
+ parents = [p for p in cl.parents(n) if p != nullid]
+ if no_merges and len(parents) == 2:
continue
count += 1
- displayer.show(repo[n])
- displayer.close()
- cmdutil.outgoinghooks(ui, repo, other, opts, o)
- recurse()
- return 0 # exit code is zero since we found outgoing changes
+ yield n
+
+
+def outgoing(ui, repo, dests, opts, subpath=None):
+ if opts.get(b'graph'):
+ logcmdutil.checkunsupportedgraphflags([], opts)
+ o, others = _outgoing(ui, repo, dests, opts, subpath=subpath)
+ ret = 1
+ try:
+ if o:
+ ret = 0
+
+ if opts.get(b'graph'):
+ revdag = logcmdutil.graphrevs(repo, o, opts)
+ ui.pager(b'outgoing')
+ displayer = logcmdutil.changesetdisplayer(
+ ui, repo, opts, buffered=True
+ )
+ logcmdutil.displaygraph(
+ ui, repo, revdag, displayer, graphmod.asciiedges
+ )
+ else:
+ ui.pager(b'outgoing')
+ displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
+ for n in _outgoing_filter(repo, o, opts):
+ displayer.show(repo[n])
+ displayer.close()
+ for oth in others:
+ cmdutil.outgoinghooks(ui, repo, oth, opts, o)
+ ret = min(ret, _outgoing_recurse(ui, repo, dests, opts))
+ return ret # exit code is zero since we found outgoing changes
+ finally:
+ for oth in others:
+ oth.close()
def verify(repo, level=None):
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/hgweb/__init__.py
--- a/mercurial/hgweb/__init__.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/hgweb/__init__.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,7 +1,7 @@
# hgweb/__init__.py - web interface to a mercurial repository
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge
-# Copyright 2005 Matt Mackall
+# Copyright 2005 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/hgweb/common.py
--- a/mercurial/hgweb/common.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/hgweb/common.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,7 +1,7 @@
# hgweb/common.py - Utility functions needed by hgweb_mod and hgwebdir_mod
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge
-# Copyright 2005, 2006 Matt Mackall
+# Copyright 2005, 2006 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/hgweb/hgweb_mod.py
--- a/mercurial/hgweb/hgweb_mod.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/hgweb/hgweb_mod.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,7 +1,7 @@
# hgweb/hgweb_mod.py - Web interface for a repository.
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/hgweb/hgwebdir_mod.py
--- a/mercurial/hgweb/hgwebdir_mod.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/hgweb/hgwebdir_mod.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,7 +1,7 @@
# hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge
-# Copyright 2005, 2006 Matt Mackall
+# Copyright 2005, 2006 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/hgweb/request.py
--- a/mercurial/hgweb/request.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/hgweb/request.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,7 +1,7 @@
# hgweb/request.py - An http request from either CGI or the standalone server.
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge
-# Copyright 2005, 2006 Matt Mackall
+# Copyright 2005, 2006 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -17,6 +17,9 @@
pycompat,
util,
)
+from ..utils import (
+ urlutil,
+)
class multidict(object):
@@ -184,7 +187,7 @@
reponame = env.get(b'REPO_NAME')
if altbaseurl:
- altbaseurl = util.url(altbaseurl)
+ altbaseurl = urlutil.url(altbaseurl)
# https://www.python.org/dev/peps/pep-0333/#environ-variables defines
# the environment variables.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/hgweb/server.py
--- a/mercurial/hgweb/server.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/hgweb/server.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,7 +1,7 @@
# hgweb/server.py - The standalone hg web server.
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -28,6 +28,9 @@
pycompat,
util,
)
+from ..utils import (
+ urlutil,
+)
httpservermod = util.httpserver
socketserver = util.socketserver
@@ -431,7 +434,7 @@
sys.setdefaultencoding(oldenc)
address = ui.config(b'web', b'address')
- port = util.getport(ui.config(b'web', b'port'))
+ port = urlutil.getport(ui.config(b'web', b'port'))
try:
return cls(ui, app, (address, port), handler)
except socket.error as inst:
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/hgweb/webcommands.py
--- a/mercurial/hgweb/webcommands.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/hgweb/webcommands.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/hgweb/webutil.py
--- a/mercurial/hgweb/webutil.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/hgweb/webutil.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,7 +1,7 @@
# hgweb/webutil.py - utility library for the web interface.
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/hook.py
--- a/mercurial/hook.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/hook.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# hook.py - hook support for mercurial
#
-# Copyright 2007 Matt Mackall
+# Copyright 2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/httpconnection.py
--- a/mercurial/httpconnection.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/httpconnection.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# httpconnection.py - urllib2 handler for new http support
#
-# Copyright 2005, 2006, 2007, 2008 Matt Mackall
+# Copyright 2005, 2006, 2007, 2008 Olivia Mackall
# Copyright 2006, 2007 Alexis S. L. Carvalho
# Copyright 2006 Vadim Gelfer
# Copyright 2011 Google, Inc.
@@ -18,6 +18,10 @@
pycompat,
util,
)
+from .utils import (
+ urlutil,
+)
+
urlerr = util.urlerr
urlreq = util.urlreq
@@ -99,7 +103,7 @@
if not prefix:
continue
- prefixurl = util.url(prefix)
+ prefixurl = urlutil.url(prefix)
if prefixurl.user and prefixurl.user != user:
# If a username was set in the prefix, it must match the username in
# the URI.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/httppeer.py
--- a/mercurial/httppeer.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/httppeer.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# httppeer.py - HTTP repository proxy classes for mercurial
#
-# Copyright 2005, 2006 Matt Mackall
+# Copyright 2005, 2006 Olivia Mackall
# Copyright 2006 Vadim Gelfer
#
# This software may be used and distributed according to the terms of the
@@ -38,6 +38,7 @@
from .utils import (
cborutil,
stringutil,
+ urlutil,
)
httplib = util.httplib
@@ -171,9 +172,9 @@
# Send arguments via HTTP headers.
if headersize > 0:
# The headers can typically carry more data than the URL.
- encargs = urlreq.urlencode(sorted(args.items()))
+ encoded_args = urlreq.urlencode(sorted(args.items()))
for header, value in encodevalueinheaders(
- encargs, b'X-HgArg', headersize
+ encoded_args, b'X-HgArg', headersize
):
headers[header] = value
# Send arguments via query string (Mercurial <1.9).
@@ -305,7 +306,7 @@
except httplib.HTTPException as inst:
ui.debug(
b'http error requesting %s\n'
- % util.hidepassword(req.get_full_url())
+ % urlutil.hidepassword(req.get_full_url())
)
ui.traceback()
raise IOError(None, inst)
@@ -352,14 +353,14 @@
except AttributeError:
proto = pycompat.bytesurl(resp.headers.get('content-type', ''))
- safeurl = util.hidepassword(baseurl)
+ safeurl = urlutil.hidepassword(baseurl)
if proto.startswith(b'application/hg-error'):
raise error.OutOfBandError(resp.read())
# Pre 1.0 versions of Mercurial used text/plain and
# application/hg-changegroup. We don't support such old servers.
if not proto.startswith(b'application/mercurial-'):
- ui.debug(b"requested URL: '%s'\n" % util.hidepassword(requrl))
+ ui.debug(b"requested URL: '%s'\n" % urlutil.hidepassword(requrl))
msg = _(
b"'%s' does not appear to be an hg repository:\n"
b"---%%<--- (%s)\n%s\n---%%<---\n"
@@ -1058,7 +1059,7 @@
``requestbuilder`` is the type used for constructing HTTP requests.
It exists as an argument so extensions can override the default.
"""
- u = util.url(path)
+ u = urlutil.url(path)
if u.query or u.fragment:
raise error.Abort(
_(b'unsupported URL component: "%s"') % (u.query or u.fragment)
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/i18n.py
--- a/mercurial/i18n.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/i18n.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# i18n.py - internationalization support for mercurial
#
-# Copyright 2005, 2006 Matt Mackall
+# Copyright 2005, 2006 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -19,6 +19,13 @@
pycompat,
)
+if pycompat.TYPE_CHECKING:
+ from typing import (
+ Callable,
+ List,
+ )
+
+
# modelled after templater.templatepath:
if getattr(sys, 'frozen', None) is not None:
module = pycompat.sysexecutable
@@ -40,7 +47,10 @@
try:
import ctypes
+ # pytype: disable=module-attr
langid = ctypes.windll.kernel32.GetUserDefaultUILanguage()
+ # pytype: enable=module-attr
+
_languages = [locale.windows_locale[langid]]
except (ImportError, AttributeError, KeyError):
# ctypes not found or unknown langid
@@ -51,7 +61,7 @@
localedir = os.path.join(datapath, 'locale')
t = gettextmod.translation('hg', localedir, _languages, fallback=True)
try:
- _ugettext = t.ugettext
+ _ugettext = t.ugettext # pytype: disable=attribute-error
except AttributeError:
_ugettext = t.gettext
@@ -60,6 +70,7 @@
def gettext(message):
+ # type: (bytes) -> bytes
"""Translate message.
The message is looked up in the catalog to get a Unicode string,
@@ -77,7 +88,7 @@
if message not in cache:
if type(message) is pycompat.unicode:
# goofy unicode docstrings in test
- paragraphs = message.split(u'\n\n')
+ paragraphs = message.split(u'\n\n') # type: List[pycompat.unicode]
else:
# should be ascii, but we have unicode docstrings in test, which
# are converted to utf-8 bytes on Python 3.
@@ -110,6 +121,6 @@
if _plain():
- _ = lambda message: message
+ _ = lambda message: message # type: Callable[[bytes], bytes]
else:
_ = gettext
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/interfaces/dirstate.py
--- a/mercurial/interfaces/dirstate.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/interfaces/dirstate.py Tue Apr 20 11:01:06 2021 -0400
@@ -8,7 +8,7 @@
class idirstate(interfaceutil.Interface):
- def __init__(opener, ui, root, validate, sparsematchfn):
+ def __init__(opener, ui, root, validate, sparsematchfn, nodeconstants):
"""Create a new dirstate object.
opener is an open()-like callable that can be used to open the
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/interfaces/repository.py
--- a/mercurial/interfaces/repository.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/interfaces/repository.py Tue Apr 20 11:01:06 2021 -0400
@@ -453,6 +453,10 @@
"""
)
+ sidedata = interfaceutil.Attribute(
+ """Raw sidedata bytes for the given revision."""
+ )
+
class ifilerevisionssequence(interfaceutil.Interface):
"""Contains index data for all revisions of a file.
@@ -519,6 +523,10 @@
* Metadata to facilitate storage.
"""
+ nullid = interfaceutil.Attribute(
+ """node for the null revision for use as delta base."""
+ )
+
def __len__():
"""Obtain the number of revisions stored for this file."""
@@ -734,7 +742,7 @@
flags=0,
cachedelta=None,
):
- """Add a new revision to the store.
+ """Add a new revision to the store and return its number.
This is similar to ``add()`` except it operates at a lower level.
@@ -769,7 +777,14 @@
``nullid``, in which case the header from the delta can be ignored
and the delta used as the fulltext.
- ``addrevisioncb`` should be called for each node as it is committed.
+ ``alwayscache`` instructs the lower layers to cache the content of the
+ newly added revision, even if it needs to be explicitly computed.
+ This used to be the default when ``addrevisioncb`` was provided up to
+ Mercurial 5.8.
+
+ ``addrevisioncb`` should be called for each new rev as it is committed.
+ ``duplicaterevisioncb`` should be called for all revs with a
+ pre-existing node.
``maybemissingparents`` is a bool indicating whether the incoming
data may reference parents/ancestor revisions that aren't present.
@@ -1132,6 +1147,10 @@
class imanifeststorage(interfaceutil.Interface):
"""Storage interface for manifest data."""
+ nodeconstants = interfaceutil.Attribute(
+ """nodeconstants used by the current repository."""
+ )
+
tree = interfaceutil.Attribute(
"""The path to the directory this manifest tracks.
@@ -1355,6 +1374,10 @@
tree manifests.
"""
+ nodeconstants = interfaceutil.Attribute(
+ """nodeconstants used by the current repository."""
+ )
+
def __getitem__(node):
"""Obtain a manifest instance for a given binary node.
@@ -1423,6 +1446,13 @@
This currently captures the reality of things - not how things should be.
"""
+ nodeconstants = interfaceutil.Attribute(
+ """Constant nodes matching the hash function used by the repository."""
+ )
+ nullid = interfaceutil.Attribute(
+ """null revision for the hash function used by the repository."""
+ )
+
supportedformats = interfaceutil.Attribute(
"""Set of requirements that apply to stream clone.
@@ -1641,6 +1671,14 @@
def revbranchcache():
pass
+ def register_changeset(rev, changelogrevision):
+ """Extension point for caches for new nodes.
+
+ Multiple consumers are expected to need parts of the changelogrevision,
+ so it is provided as optimization to avoid duplicate lookups. A simple
+ cache would be fragile when other revisions are accessed, too."""
+ pass
+
def branchtip(branchtip, ignoremissing=False):
"""Return the tip node for a given branch."""
@@ -1813,6 +1851,12 @@
def savecommitmessage(text):
pass
+ def register_sidedata_computer(kind, category, keys, computer):
+ pass
+
+ def register_wanted_sidedata(category):
+ pass
+
class completelocalrepository(
ilocalrepositorymain, ilocalrepositoryfilestorage
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/localrepo.py
--- a/mercurial/localrepo.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/localrepo.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# localrepo.py - read/write repository class for mercurial
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -21,6 +21,7 @@
hex,
nullid,
nullrev,
+ sha1nodeconstants,
short,
)
from .pycompat import (
@@ -49,6 +50,7 @@
match as matchmod,
mergestate as mergestatemod,
mergeutil,
+ metadata as metadatamod,
namespaces,
narrowspec,
obsolete,
@@ -71,6 +73,7 @@
txnutil,
util,
vfs as vfsmod,
+ wireprototypes,
)
from .interfaces import (
@@ -82,9 +85,13 @@
hashutil,
procutil,
stringutil,
+ urlutil,
)
-from .revlogutils import constants as revlogconst
+from .revlogutils import (
+ concurrency_checker as revlogchecker,
+ constants as revlogconst,
+)
release = lockmod.release
urlerr = util.urlerr
@@ -270,6 +277,11 @@
caps = moderncaps.copy()
self._repo = repo.filtered(b'served')
self.ui = repo.ui
+
+ if repo._wanted_sidedata:
+ formatted = bundle2.format_remote_wanted_sidedata(repo)
+ caps.add(b'exp-wanted-sidedata=' + formatted)
+
self._caps = repo._restrictcapabilities(caps)
# Begin of _basepeer interface.
@@ -313,7 +325,13 @@
)
def getbundle(
- self, source, heads=None, common=None, bundlecaps=None, **kwargs
+ self,
+ source,
+ heads=None,
+ common=None,
+ bundlecaps=None,
+ remote_sidedata=None,
+ **kwargs
):
chunks = exchange.getbundlechunks(
self._repo,
@@ -321,6 +339,7 @@
heads=heads,
common=common,
bundlecaps=bundlecaps,
+ remote_sidedata=remote_sidedata,
**kwargs
)[1]
cb = util.chunkbuffer(chunks)
@@ -452,7 +471,7 @@
# ``.hg/`` for ``relshared``.
sharedpath = hgvfs.read(b'sharedpath').rstrip(b'\n')
if requirementsmod.RELATIVE_SHARED_REQUIREMENT in requirements:
- sharedpath = hgvfs.join(sharedpath)
+ sharedpath = util.normpath(hgvfs.join(sharedpath))
sharedvfs = vfsmod.vfs(sharedpath, realpath=True)
@@ -939,11 +958,10 @@
def makestore(requirements, path, vfstype):
"""Construct a storage object for a repository."""
- if b'store' in requirements:
- if b'fncache' in requirements:
- return storemod.fncachestore(
- path, vfstype, b'dotencode' in requirements
- )
+ if requirementsmod.STORE_REQUIREMENT in requirements:
+ if requirementsmod.FNCACHE_REQUIREMENT in requirements:
+ dotencode = requirementsmod.DOTENCODE_REQUIREMENT in requirements
+ return storemod.fncachestore(path, vfstype, dotencode)
return storemod.encodedstore(path, vfstype)
@@ -971,7 +989,7 @@
# opener options for it because those options wouldn't do anything
# meaningful on such old repos.
if (
- b'revlogv1' in requirements
+ requirementsmod.REVLOGV1_REQUIREMENT in requirements
or requirementsmod.REVLOGV2_REQUIREMENT in requirements
):
options.update(resolverevlogstorevfsoptions(ui, requirements, features))
@@ -995,12 +1013,12 @@
options = {}
options[b'flagprocessors'] = {}
- if b'revlogv1' in requirements:
+ if requirementsmod.REVLOGV1_REQUIREMENT in requirements:
options[b'revlogv1'] = True
if requirementsmod.REVLOGV2_REQUIREMENT in requirements:
options[b'revlogv2'] = True
- if b'generaldelta' in requirements:
+ if requirementsmod.GENERALDELTA_REQUIREMENT in requirements:
options[b'generaldelta'] = True
# experimental config: format.chunkcachesize
@@ -1196,8 +1214,8 @@
# being successful (repository sizes went up due to worse delta
# chains), and the code was deleted in 4.6.
supportedformats = {
- b'revlogv1',
- b'generaldelta',
+ requirementsmod.REVLOGV1_REQUIREMENT,
+ requirementsmod.GENERALDELTA_REQUIREMENT,
requirementsmod.TREEMANIFEST_REQUIREMENT,
requirementsmod.COPIESSDC_REQUIREMENT,
requirementsmod.REVLOGV2_REQUIREMENT,
@@ -1208,11 +1226,11 @@
requirementsmod.SHARESAFE_REQUIREMENT,
}
_basesupported = supportedformats | {
- b'store',
- b'fncache',
+ requirementsmod.STORE_REQUIREMENT,
+ requirementsmod.FNCACHE_REQUIREMENT,
requirementsmod.SHARED_REQUIREMENT,
requirementsmod.RELATIVE_SHARED_REQUIREMENT,
- b'dotencode',
+ requirementsmod.DOTENCODE_REQUIREMENT,
requirementsmod.SPARSE_REQUIREMENT,
requirementsmod.INTERNAL_PHASE_REQUIREMENT,
}
@@ -1315,6 +1333,8 @@
self.vfs = hgvfs
self.path = hgvfs.base
self.requirements = requirements
+ self.nodeconstants = sha1nodeconstants
+ self.nullid = self.nodeconstants.nullid
self.supported = supportedrequirements
self.sharedpath = sharedpath
self.store = store
@@ -1386,6 +1406,10 @@
if requirementsmod.COPIESSDC_REQUIREMENT in self.requirements:
self.filecopiesmode = b'changeset-sidedata'
+ self._wanted_sidedata = set()
+ self._sidedata_computers = {}
+ metadatamod.set_sidedata_spec_for_repo(self)
+
def _getvfsward(self, origfunc):
"""build a ward for self.vfs"""
rref = weakref.ref(self)
@@ -1473,6 +1497,8 @@
bundle2.getrepocaps(self, role=b'client')
)
caps.add(b'bundle2=' + urlreq.quote(capsblob))
+ if self.ui.configbool(b'experimental', b'narrow'):
+ caps.add(wireprototypes.NARROWCAP)
return caps
# Don't cache auditor/nofsauditor, or you'll end up with reference cycle:
@@ -1639,7 +1665,10 @@
def changelog(self):
# load dirstate before changelog to avoid race see issue6303
self.dirstate.prefetch_parents()
- return self.store.changelog(txnutil.mayhavepending(self.root))
+ return self.store.changelog(
+ txnutil.mayhavepending(self.root),
+ concurrencychecker=revlogchecker.get_checker(self.ui, b'changelog'),
+ )
@storecache(b'00manifest.i')
def manifestlog(self):
@@ -1654,7 +1683,12 @@
sparsematchfn = lambda: sparse.matcher(self)
return dirstate.dirstate(
- self.vfs, self.ui, self.root, self._dirstatevalidate, sparsematchfn
+ self.vfs,
+ self.ui,
+ self.root,
+ self._dirstatevalidate,
+ sparsematchfn,
+ self.nodeconstants,
)
def _dirstatevalidate(self, node):
@@ -2059,6 +2093,9 @@
self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
return self._revbranchcache
+ def register_changeset(self, rev, changelogrevision):
+ self.revbranchcache().setdata(rev, changelogrevision)
+
def branchtip(self, branch, ignoremissing=False):
"""return the tip node for a given branch
@@ -3326,6 +3363,22 @@
fp.close()
return self.pathto(fp.name[len(self.root) + 1 :])
+ def register_wanted_sidedata(self, category):
+ self._wanted_sidedata.add(pycompat.bytestr(category))
+
+ def register_sidedata_computer(self, kind, category, keys, computer):
+ if kind not in (b"changelog", b"manifest", b"filelog"):
+ msg = _(b"unexpected revlog kind '%s'.")
+ raise error.ProgrammingError(msg % kind)
+ category = pycompat.bytestr(category)
+ if category in self._sidedata_computers.get(kind, []):
+ msg = _(
+ b"cannot register a sidedata computer twice for category '%s'."
+ )
+ raise error.ProgrammingError(msg % category)
+ self._sidedata_computers.setdefault(kind, {})
+ self._sidedata_computers[kind][category] = (keys, computer)
+
# used to avoid circular references so destructors work
def aftertrans(files):
@@ -3352,7 +3405,7 @@
def instance(ui, path, create, intents=None, createopts=None):
- localpath = util.urllocalpath(path)
+ localpath = urlutil.urllocalpath(path)
if create:
createrepository(ui, localpath, createopts=createopts)
@@ -3410,18 +3463,20 @@
% createopts[b'backend']
)
- requirements = {b'revlogv1'}
+ requirements = {requirementsmod.REVLOGV1_REQUIREMENT}
if ui.configbool(b'format', b'usestore'):
- requirements.add(b'store')
+ requirements.add(requirementsmod.STORE_REQUIREMENT)
if ui.configbool(b'format', b'usefncache'):
- requirements.add(b'fncache')
+ requirements.add(requirementsmod.FNCACHE_REQUIREMENT)
if ui.configbool(b'format', b'dotencode'):
- requirements.add(b'dotencode')
+ requirements.add(requirementsmod.DOTENCODE_REQUIREMENT)
compengines = ui.configlist(b'format', b'revlog-compression')
for compengine in compengines:
if compengine in util.compengines:
- break
+ engine = util.compengines[compengine]
+ if engine.available() and engine.revlogheader():
+ break
else:
raise error.Abort(
_(
@@ -3442,15 +3497,19 @@
requirements.add(b'exp-compression-%s' % compengine)
if scmutil.gdinitconfig(ui):
- requirements.add(b'generaldelta')
+ requirements.add(requirementsmod.GENERALDELTA_REQUIREMENT)
if ui.configbool(b'format', b'sparse-revlog'):
requirements.add(requirementsmod.SPARSEREVLOG_REQUIREMENT)
# experimental config: format.exp-use-side-data
if ui.configbool(b'format', b'exp-use-side-data'):
+ requirements.discard(requirementsmod.REVLOGV1_REQUIREMENT)
+ requirements.add(requirementsmod.REVLOGV2_REQUIREMENT)
requirements.add(requirementsmod.SIDEDATA_REQUIREMENT)
# experimental config: format.exp-use-copies-side-data-changeset
if ui.configbool(b'format', b'exp-use-copies-side-data-changeset'):
+ requirements.discard(requirementsmod.REVLOGV1_REQUIREMENT)
+ requirements.add(requirementsmod.REVLOGV2_REQUIREMENT)
requirements.add(requirementsmod.SIDEDATA_REQUIREMENT)
requirements.add(requirementsmod.COPIESSDC_REQUIREMENT)
if ui.configbool(b'experimental', b'treemanifest'):
@@ -3458,9 +3517,9 @@
revlogv2 = ui.config(b'experimental', b'revlogv2')
if revlogv2 == b'enable-unstable-format-and-corrupt-my-data':
- requirements.remove(b'revlogv1')
+ requirements.discard(requirementsmod.REVLOGV1_REQUIREMENT)
# generaldelta is implied by revlogv2.
- requirements.discard(b'generaldelta')
+ requirements.discard(requirementsmod.GENERALDELTA_REQUIREMENT)
requirements.add(requirementsmod.REVLOGV2_REQUIREMENT)
# experimental config: format.internal-phase
if ui.configbool(b'format', b'internal-phase'):
@@ -3494,7 +3553,7 @@
dropped = set()
- if b'store' not in requirements:
+ if requirementsmod.STORE_REQUIREMENT not in requirements:
if bookmarks.BOOKMARKS_IN_STORE_REQUIREMENT in requirements:
ui.warn(
_(
@@ -3617,6 +3676,7 @@
if createopts.get(b'sharedrelative'):
try:
sharedpath = os.path.relpath(sharedpath, hgvfs.base)
+ sharedpath = util.pconvert(sharedpath)
except (IOError, ValueError) as e:
# ValueError is raised on Windows if the drive letters differ
# on each path.
@@ -3633,7 +3693,8 @@
hgvfs.mkdir(b'cache')
hgvfs.mkdir(b'wcache')
- if b'store' in requirements and b'sharedrepo' not in createopts:
+ has_store = requirementsmod.STORE_REQUIREMENT in requirements
+ if has_store and b'sharedrepo' not in createopts:
hgvfs.mkdir(b'store')
# We create an invalid changelog outside the store so very old
@@ -3642,11 +3703,11 @@
# effectively locks out old clients and prevents them from
# mucking with a repo in an unknown format.
#
- # The revlog header has version 2, which won't be recognized by
+ # The revlog header has version 65535, which won't be recognized by
# such old clients.
hgvfs.append(
b'00changelog.i',
- b'\0\0\0\2 dummy changelog to prevent using the old repo '
+ b'\0\0\xFF\xFF dummy changelog to prevent using the old repo '
b'layout',
)
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/lock.py
--- a/mercurial/lock.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/lock.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# lock.py - simple advisory locking scheme for mercurial
#
-# Copyright 2005, 2006 Matt Mackall
+# Copyright 2005, 2006 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/logcmdutil.py
--- a/mercurial/logcmdutil.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/logcmdutil.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# logcmdutil.py - utility for log-like commands
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -14,6 +14,7 @@
from .i18n import _
from .node import (
nullid,
+ nullrev,
wdirid,
wdirrev,
)
@@ -27,6 +28,7 @@
graphmod,
match as matchmod,
mdiff,
+ merge,
patch,
pathutil,
pycompat,
@@ -74,6 +76,36 @@
return limit
+def diff_parent(ctx):
+ """get the context object to use as parent when diffing
+
+ If diff.merge is enabled, an overlayworkingctx of the
+ auto-merged parents will be returned.
+ """
+ repo = ctx.repo()
+ if repo.ui.configbool(b"diff", b"merge") and ctx.p2().rev() != nullrev:
+ # avoid cycle context -> subrepo -> cmdutil -> logcmdutil
+ from . import context
+
+ wctx = context.overlayworkingctx(repo)
+ wctx.setbase(ctx.p1())
+ with repo.ui.configoverride(
+ {
+ (
+ b"ui",
+ b"forcemerge",
+ ): b"internal:merge3-lie-about-conflicts",
+ },
+ b"merge-diff",
+ ):
+ repo.ui.pushbuffer()
+ merge.merge(ctx.p2(), wc=wctx)
+ repo.ui.popbuffer()
+ return wctx
+ else:
+ return ctx.p1()
+
+
def diffordiffstat(
ui,
repo,
@@ -217,7 +249,7 @@
ui,
ctx.repo(),
diffopts,
- ctx.p1(),
+ diff_parent(ctx),
ctx,
match=self._makefilematcher(ctx),
stat=stat,
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/logexchange.py
--- a/mercurial/logexchange.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/logexchange.py Tue Apr 20 11:01:06 2021 -0400
@@ -15,6 +15,9 @@
util,
vfs as vfsmod,
)
+from .utils import (
+ urlutil,
+)
# directory name in .hg/ in which remotenames files will be present
remotenamedir = b'logexchange'
@@ -117,7 +120,7 @@
# represent the remotepath with user defined path name if exists
for path, url in repo.ui.configitems(b'paths'):
# remove auth info from user defined url
- noauthurl = util.removeauth(url)
+ noauthurl = urlutil.removeauth(url)
# Standardize on unix style paths, otherwise some {remotenames} end up
# being an absolute path on Windows.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/loggingutil.py
--- a/mercurial/loggingutil.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/loggingutil.py Tue Apr 20 11:01:06 2021 -0400
@@ -10,7 +10,10 @@
import errno
-from . import pycompat
+from . import (
+ encoding,
+ pycompat,
+)
from .utils import (
dateutil,
@@ -32,7 +35,7 @@
if err.errno != errno.ENOENT:
ui.debug(
b"warning: cannot remove '%s': %s\n"
- % (newpath, err.strerror)
+ % (newpath, encoding.strtolocal(err.strerror))
)
try:
if newpath:
@@ -41,7 +44,7 @@
if err.errno != errno.ENOENT:
ui.debug(
b"warning: cannot rename '%s' to '%s': %s\n"
- % (newpath, oldpath, err.strerror)
+ % (newpath, oldpath, encoding.strtolocal(err.strerror))
)
if maxsize > 0:
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/mail.py
--- a/mercurial/mail.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/mail.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# mail.py - mail sending bits for mercurial
#
-# Copyright 2006 Matt Mackall
+# Copyright 2006 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -34,6 +34,7 @@
from .utils import (
procutil,
stringutil,
+ urlutil,
)
if pycompat.TYPE_CHECKING:
@@ -139,7 +140,7 @@
defaultport = 465
else:
defaultport = 25
- mailport = util.getport(ui.config(b'smtp', b'port', defaultport))
+ mailport = urlutil.getport(ui.config(b'smtp', b'port', defaultport))
ui.note(_(b'sending mail: smtp host %s, port %d\n') % (mailhost, mailport))
s.connect(host=mailhost, port=mailport)
if starttls:
@@ -150,6 +151,32 @@
if starttls or smtps:
ui.note(_(b'(verifying remote certificate)\n'))
sslutil.validatesocket(s.sock)
+
+ try:
+ _smtp_login(ui, s, mailhost, mailport)
+ except smtplib.SMTPException as inst:
+ raise error.Abort(stringutil.forcebytestr(inst))
+
+ def send(sender, recipients, msg):
+ try:
+ return s.sendmail(sender, recipients, msg)
+ except smtplib.SMTPRecipientsRefused as inst:
+ recipients = [r[1] for r in inst.recipients.values()]
+ raise error.Abort(b'\n' + b'\n'.join(recipients))
+ except smtplib.SMTPException as inst:
+ raise error.Abort(stringutil.forcebytestr(inst))
+
+ return send
+
+
+def _smtp_login(ui, smtp, mailhost, mailport):
+ """A hook for the keyring extension to perform the actual SMTP login.
+
+ An already connected SMTP object of the proper type is provided, based on
+ the current configuration. The host and port to which the connection was
+ established are provided for accessibility, since the SMTP object doesn't
+ provide an accessor. ``smtplib.SMTPException`` is raised on error.
+ """
username = ui.config(b'smtp', b'username')
password = ui.config(b'smtp', b'password')
if username:
@@ -162,21 +189,7 @@
if username and password:
ui.note(_(b'(authenticating to mail server as %s)\n') % username)
username = encoding.strfromlocal(username)
- try:
- s.login(username, password)
- except smtplib.SMTPException as inst:
- raise error.Abort(stringutil.forcebytestr(inst))
-
- def send(sender, recipients, msg):
- try:
- return s.sendmail(sender, recipients, msg)
- except smtplib.SMTPRecipientsRefused as inst:
- recipients = [r[1] for r in inst.recipients.values()]
- raise error.Abort(b'\n' + b'\n'.join(recipients))
- except smtplib.SMTPException as inst:
- raise error.Abort(inst)
-
- return send
+ smtp.login(username, password)
def _sendmail(ui, sender, recipients, msg):
@@ -207,17 +220,16 @@
def _mbox(mbox, sender, recipients, msg):
'''write mails to mbox'''
- fp = open(mbox, b'ab+')
- # Should be time.asctime(), but Windows prints 2-characters day
- # of month instead of one. Make them print the same thing.
- date = time.strftime('%a %b %d %H:%M:%S %Y', time.localtime())
- fp.write(
- b'From %s %s\n'
- % (encoding.strtolocal(sender), encoding.strtolocal(date))
- )
- fp.write(msg)
- fp.write(b'\n\n')
- fp.close()
+ with open(mbox, b'ab+') as fp:
+ # Should be time.asctime(), but Windows prints 2-characters day
+ # of month instead of one. Make them print the same thing.
+ date = time.strftime('%a %b %d %H:%M:%S %Y', time.localtime())
+ fp.write(
+ b'From %s %s\n'
+ % (encoding.strtolocal(sender), encoding.strtolocal(date))
+ )
+ fp.write(msg)
+ fp.write(b'\n\n')
def connect(ui, mbox=None):
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/manifest.py
--- a/mercurial/manifest.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/manifest.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# manifest.py - manifest revision class for mercurial
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -792,8 +792,9 @@
@interfaceutil.implementer(repository.imanifestdict)
class treemanifest(object):
- def __init__(self, dir=b'', text=b''):
+ def __init__(self, nodeconstants, dir=b'', text=b''):
self._dir = dir
+ self.nodeconstants = nodeconstants
self._node = nullid
self._loadfunc = _noop
self._copyfunc = _noop
@@ -1051,7 +1052,9 @@
if dir:
self._loadlazy(dir)
if dir not in self._dirs:
- self._dirs[dir] = treemanifest(self._subpath(dir))
+ self._dirs[dir] = treemanifest(
+ self.nodeconstants, self._subpath(dir)
+ )
self._dirs[dir].__setitem__(subpath, n)
else:
# manifest nodes are either 20 bytes or 32 bytes,
@@ -1078,14 +1081,16 @@
if dir:
self._loadlazy(dir)
if dir not in self._dirs:
- self._dirs[dir] = treemanifest(self._subpath(dir))
+ self._dirs[dir] = treemanifest(
+ self.nodeconstants, self._subpath(dir)
+ )
self._dirs[dir].setflag(subpath, flags)
else:
self._flags[f] = flags
self._dirty = True
def copy(self):
- copy = treemanifest(self._dir)
+ copy = treemanifest(self.nodeconstants, self._dir)
copy._node = self._node
copy._dirty = self._dirty
if self._copyfunc is _noop:
@@ -1215,7 +1220,7 @@
visit = match.visitchildrenset(self._dir[:-1])
if visit == b'all':
return self.copy()
- ret = treemanifest(self._dir)
+ ret = treemanifest(self.nodeconstants, self._dir)
if not visit:
return ret
@@ -1272,7 +1277,7 @@
m2 = m2._matches(match)
return m1.diff(m2, clean=clean)
result = {}
- emptytree = treemanifest()
+ emptytree = treemanifest(self.nodeconstants)
def _iterativediff(t1, t2, stack):
"""compares two tree manifests and append new tree-manifests which
@@ -1368,7 +1373,7 @@
self._load() # for consistency; should never have any effect here
m1._load()
m2._load()
- emptytree = treemanifest()
+ emptytree = treemanifest(self.nodeconstants)
def getnode(m, d):
ld = m._lazydirs.get(d)
@@ -1551,6 +1556,7 @@
def __init__(
self,
+ nodeconstants,
opener,
tree=b'',
dirlogcache=None,
@@ -1567,6 +1573,7 @@
option takes precedence, so if it is set to True, we ignore whatever
value is passed in to the constructor.
"""
+ self.nodeconstants = nodeconstants
# During normal operations, we expect to deal with not more than four
# revs at a time (such as during commit --amend). When rebasing large
# stacks of commits, the number can go up, hence the config knob below.
@@ -1610,6 +1617,7 @@
self.index = self._revlog.index
self.version = self._revlog.version
self._generaldelta = self._revlog._generaldelta
+ self._revlog.revlog_kind = b'manifest'
def _setupmanifestcachehooks(self, repo):
"""Persist the manifestfulltextcache on lock release"""
@@ -1653,7 +1661,11 @@
assert self._treeondisk
if d not in self._dirlogcache:
mfrevlog = manifestrevlog(
- self.opener, d, self._dirlogcache, treemanifest=self._treeondisk
+ self.nodeconstants,
+ self.opener,
+ d,
+ self._dirlogcache,
+ treemanifest=self._treeondisk,
)
self._dirlogcache[d] = mfrevlog
return self._dirlogcache[d]
@@ -1704,9 +1716,10 @@
arraytext, deltatext = m.fastdelta(self.fulltextcache[p1], work)
cachedelta = self._revlog.rev(p1), deltatext
text = util.buffer(arraytext)
- n = self._revlog.addrevision(
+ rev = self._revlog.addrevision(
text, transaction, link, p1, p2, cachedelta
)
+ n = self._revlog.node(rev)
except FastdeltaUnavailable:
# The first parent manifest isn't already loaded or the
# manifest implementation doesn't support fastdelta, so
@@ -1724,7 +1737,8 @@
arraytext = None
else:
text = m.text()
- n = self._revlog.addrevision(text, transaction, link, p1, p2)
+ rev = self._revlog.addrevision(text, transaction, link, p1, p2)
+ n = self._revlog.node(rev)
arraytext = bytearray(text)
if arraytext is not None:
@@ -1765,9 +1779,10 @@
n = m2.node()
if not n:
- n = self._revlog.addrevision(
+ rev = self._revlog.addrevision(
text, transaction, link, m1.node(), m2.node()
)
+ n = self._revlog.node(rev)
# Save nodeid so parent manifest can calculate its nodeid
m.setnode(n)
@@ -1822,6 +1837,7 @@
revisiondata=False,
assumehaveparentrevisions=False,
deltamode=repository.CG_DELTAMODE_STD,
+ sidedata_helpers=None,
):
return self._revlog.emitrevisions(
nodes,
@@ -1829,6 +1845,7 @@
revisiondata=revisiondata,
assumehaveparentrevisions=assumehaveparentrevisions,
deltamode=deltamode,
+ sidedata_helpers=sidedata_helpers,
)
def addgroup(
@@ -1836,6 +1853,7 @@
deltas,
linkmapper,
transaction,
+ alwayscache=False,
addrevisioncb=None,
duplicaterevisioncb=None,
):
@@ -1843,6 +1861,7 @@
deltas,
linkmapper,
transaction,
+ alwayscache=alwayscache,
addrevisioncb=addrevisioncb,
duplicaterevisioncb=duplicaterevisioncb,
)
@@ -1909,6 +1928,7 @@
they receive (i.e. tree or flat or lazily loaded, etc)."""
def __init__(self, opener, repo, rootstore, narrowmatch):
+ self.nodeconstants = repo.nodeconstants
usetreemanifest = False
cachesize = 4
@@ -1947,7 +1967,7 @@
if not self._narrowmatch.always():
if not self._narrowmatch.visitdir(tree[:-1]):
- return excludeddirmanifestctx(tree, node)
+ return excludeddirmanifestctx(self.nodeconstants, tree, node)
if tree:
if self._rootstore._treeondisk:
if verify:
@@ -2110,7 +2130,7 @@
def __init__(self, manifestlog, dir=b''):
self._manifestlog = manifestlog
self._dir = dir
- self._treemanifest = treemanifest()
+ self._treemanifest = treemanifest(manifestlog.nodeconstants)
def _storage(self):
return self._manifestlog.getstorage(b'')
@@ -2160,17 +2180,19 @@
narrowmatch = self._manifestlog._narrowmatch
if not narrowmatch.always():
if not narrowmatch.visitdir(self._dir[:-1]):
- return excludedmanifestrevlog(self._dir)
+ return excludedmanifestrevlog(
+ self._manifestlog.nodeconstants, self._dir
+ )
return self._manifestlog.getstorage(self._dir)
def read(self):
if self._data is None:
store = self._storage()
if self._node == nullid:
- self._data = treemanifest()
+ self._data = treemanifest(self._manifestlog.nodeconstants)
# TODO accessing non-public API
elif store._treeondisk:
- m = treemanifest(dir=self._dir)
+ m = treemanifest(self._manifestlog.nodeconstants, dir=self._dir)
def gettext():
return store.revision(self._node)
@@ -2190,7 +2212,9 @@
text = store.revision(self._node)
arraytext = bytearray(text)
store.fulltextcache[self._node] = arraytext
- self._data = treemanifest(dir=self._dir, text=text)
+ self._data = treemanifest(
+ self._manifestlog.nodeconstants, dir=self._dir, text=text
+ )
return self._data
@@ -2227,7 +2251,7 @@
r0 = store.deltaparent(store.rev(self._node))
m0 = self._manifestlog.get(self._dir, store.node(r0)).read()
m1 = self.read()
- md = treemanifest(dir=self._dir)
+ md = treemanifest(self._manifestlog.nodeconstants, dir=self._dir)
for f, ((n0, fl0), (n1, fl1)) in pycompat.iteritems(m0.diff(m1)):
if n1:
md[f] = n1
@@ -2270,8 +2294,8 @@
whose contents are unknown.
"""
- def __init__(self, dir, node):
- super(excludeddir, self).__init__(dir)
+ def __init__(self, nodeconstants, dir, node):
+ super(excludeddir, self).__init__(nodeconstants, dir)
self._node = node
# Add an empty file, which will be included by iterators and such,
# appearing as the directory itself (i.e. something like "dir/")
@@ -2290,12 +2314,13 @@
class excludeddirmanifestctx(treemanifestctx):
"""context wrapper for excludeddir - see that docstring for rationale"""
- def __init__(self, dir, node):
+ def __init__(self, nodeconstants, dir, node):
+ self.nodeconstants = nodeconstants
self._dir = dir
self._node = node
def read(self):
- return excludeddir(self._dir, self._node)
+ return excludeddir(self.nodeconstants, self._dir, self._node)
def readfast(self, shallow=False):
# special version of readfast since we don't have underlying storage
@@ -2317,7 +2342,8 @@
outside the narrowspec.
"""
- def __init__(self, dir):
+ def __init__(self, nodeconstants, dir):
+ self.nodeconstants = nodeconstants
self._dir = dir
def __len__(self):
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/match.py
--- a/mercurial/match.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/match.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,12 +1,13 @@
# match.py - filename matching
#
-# Copyright 2008, 2009 Matt Mackall and others
+# Copyright 2008, 2009 Olivia Mackall and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import, print_function
+import bisect
import copy
import itertools
import os
@@ -798,14 +799,38 @@
def visitdir(self, dir):
return dir in self._dirs
+ @propertycache
+ def _visitchildrenset_candidates(self):
+ """A memoized set of candidates for visitchildrenset."""
+ return self._fileset | self._dirs - {b''}
+
+ @propertycache
+ def _sorted_visitchildrenset_candidates(self):
+ """A memoized sorted list of candidates for visitchildrenset."""
+ return sorted(self._visitchildrenset_candidates)
+
def visitchildrenset(self, dir):
if not self._fileset or dir not in self._dirs:
return set()
- candidates = self._fileset | self._dirs - {b''}
- if dir != b'':
+ if dir == b'':
+ candidates = self._visitchildrenset_candidates
+ else:
+ candidates = self._sorted_visitchildrenset_candidates
d = dir + b'/'
- candidates = {c[len(d) :] for c in candidates if c.startswith(d)}
+ # Use bisect to find the first element potentially starting with d
+ # (i.e. >= d). This should always find at least one element (we'll
+ # assert later if this is not the case).
+ first = bisect.bisect_left(candidates, d)
+ # We need a representation of the first element that is > d that
+ # does not start with d, so since we added a `/` on the end of dir,
+ # we'll add whatever comes after slash (we could probably assume
+ # that `0` is after `/`, but let's not) to the end of dir instead.
+ dnext = dir + encoding.strtolocal(chr(ord(b'/') + 1))
+ # Use bisect to find the first element >= d_next
+ last = bisect.bisect_left(candidates, dnext, lo=first)
+ dlen = len(d)
+ candidates = {c[dlen:] for c in candidates[first:last]}
# self._dirs includes all of the directories, recursively, so if
# we're attempting to match foo/bar/baz.txt, it'll have '', 'foo',
# 'foo/bar' in it. Thus we can safely ignore a candidate that has a
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/mdiff.py
--- a/mercurial/mdiff.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/mdiff.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# mdiff.py - diff and patch routines for mercurial
#
-# Copyright 2005, 2006 Matt Mackall
+# Copyright 2005, 2006 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/merge.py
--- a/mercurial/merge.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/merge.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# merge.py - directory-level update/merge handling for Mercurial
#
-# Copyright 2006, 2007 Matt Mackall
+# Copyright 2006, 2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -234,7 +234,7 @@
else:
warn(_(b"%s: untracked file differs\n") % f)
if abortconflicts:
- raise error.Abort(
+ raise error.StateError(
_(
b"untracked files in working directory "
b"differ from files in requested revision"
@@ -342,7 +342,7 @@
for f in pmmf:
fold = util.normcase(f)
if fold in foldmap:
- raise error.Abort(
+ raise error.StateError(
_(b"case-folding collision between %s and %s")
% (f, foldmap[fold])
)
@@ -353,7 +353,7 @@
for fold, f in sorted(foldmap.items()):
if fold.startswith(foldprefix) and not f.startswith(unfoldprefix):
# the folded prefix matches but actual casing is different
- raise error.Abort(
+ raise error.StateError(
_(b"case-folding collision between %s and directory of %s")
% (lastfull, f)
)
@@ -505,7 +505,9 @@
if invalidconflicts:
for p in invalidconflicts:
repo.ui.warn(_(b"%s: is both a file and a directory\n") % p)
- raise error.Abort(_(b"destination manifest contains path conflicts"))
+ raise error.StateError(
+ _(b"destination manifest contains path conflicts")
+ )
def _filternarrowactions(narrowmatch, branchmerge, mresult):
@@ -1696,6 +1698,7 @@
tocomplete = []
for f, args, msg in mergeactions:
repo.ui.debug(b" %s: %s -> m (premerge)\n" % (f, msg))
+ ms.addcommitinfo(f, {b'merged': b'yes'})
progress.increment(item=f)
if f == b'.hgsubstate': # subrepo states need updating
subrepoutil.submerge(
@@ -1711,6 +1714,7 @@
# merge
for f, args, msg in tocomplete:
repo.ui.debug(b" %s: %s -> m (merge)\n" % (f, msg))
+ ms.addcommitinfo(f, {b'merged': b'yes'})
progress.increment(item=f, total=numupdates)
ms.resolve(f, wctx)
@@ -1919,10 +1923,10 @@
### check phase
if not overwrite:
if len(pl) > 1:
- raise error.Abort(_(b"outstanding uncommitted merge"))
+ raise error.StateError(_(b"outstanding uncommitted merge"))
ms = wc.mergestate()
- if list(ms.unresolved()):
- raise error.Abort(
+ if ms.unresolvedcount():
+ raise error.StateError(
_(b"outstanding merge conflicts"),
hint=_(b"use 'hg resolve' to resolve"),
)
@@ -2008,7 +2012,7 @@
if mresult.hasconflicts():
msg = _(b"conflicting changes")
hint = _(b"commit or update --clean to discard changes")
- raise error.Abort(msg, hint=hint)
+ raise error.StateError(msg, hint=hint)
# Prompt and create actions. Most of this is in the resolve phase
# already, but we can't handle .hgsubstate in filemerge or
@@ -2325,6 +2329,7 @@
removefiles=True,
abortonerror=False,
noop=False,
+ confirm=False,
):
"""Purge the working directory of untracked files.
@@ -2345,6 +2350,8 @@
``noop`` controls whether to actually remove files. If not defined, actions
will be taken.
+ ``confirm`` ask confirmation before actually removing anything.
+
Returns an iterable of relative paths in the working directory that were
or would be removed.
"""
@@ -2372,6 +2379,35 @@
status = repo.status(match=matcher, ignored=ignored, unknown=unknown)
+ if confirm:
+ nb_ignored = len(status.ignored)
+ nb_unknown = len(status.unknown)
+ if nb_unknown and nb_ignored:
+ msg = _(b"permanently delete %d unknown and %d ignored files?")
+ msg %= (nb_unknown, nb_ignored)
+ elif nb_unknown:
+ msg = _(b"permanently delete %d unknown files?")
+ msg %= nb_unknown
+ elif nb_ignored:
+ msg = _(b"permanently delete %d ignored files?")
+ msg %= nb_ignored
+ elif removeemptydirs:
+ dir_count = 0
+ for f in directories:
+ if matcher(f) and not repo.wvfs.listdir(f):
+ dir_count += 1
+ if dir_count:
+ msg = _(
+ b"permanently delete at least %d empty directories?"
+ )
+ msg %= dir_count
+ else:
+ # XXX we might be missing some directories here
+ return res
+ msg += b" (yN)$$ &Yes $$ &No"
+ if repo.ui.promptchoice(msg, default=1) == 1:
+ raise error.CanceledError(_(b'removal cancelled'))
+
if removefiles:
for f in sorted(status.unknown + status.ignored):
if not noop:
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/mergestate.py
--- a/mercurial/mergestate.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/mergestate.py Tue Apr 20 11:01:06 2021 -0400
@@ -10,7 +10,7 @@
bin,
hex,
nullhex,
- nullid,
+ nullrev,
)
from . import (
error,
@@ -341,7 +341,7 @@
flo = fco.flags()
fla = fca.flags()
if b'x' in flags + flo + fla and b'l' not in flags + flo + fla:
- if fca.node() == nullid and flags != flo:
+ if fca.rev() == nullrev and flags != flo:
if preresolve:
self._repo.ui.warn(
_(
@@ -382,7 +382,6 @@
if merge_ret is None:
# If return value of merge is None, then there are no real conflict
del self._state[dfile]
- self._stateextras.pop(dfile, None)
self._dirty = True
elif not merge_ret:
self.mark(dfile, MERGE_RECORD_RESOLVED)
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/mergeutil.py
--- a/mercurial/mergeutil.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/mergeutil.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# mergeutil.py - help for merge processing in mercurial
#
-# Copyright 2005-2007 Matt Mackall
+# Copyright 2005-2007 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -13,7 +13,7 @@
def checkunresolved(ms):
- if list(ms.unresolved()):
+ if ms.unresolvedcount():
raise error.StateError(
_(b"unresolved merge conflicts (see 'hg help resolve')")
)
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/metadata.py
--- a/mercurial/metadata.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/metadata.py Tue Apr 20 11:01:06 2021 -0400
@@ -18,6 +18,7 @@
from . import (
error,
pycompat,
+ requirements as requirementsmod,
util,
)
@@ -321,12 +322,12 @@
│ (Some, None) │ OR │🄻 Deleted │ ø │ ø │
│ │🄷 Deleted[1] │ │ │ │
├──────────────┼──────────────┼──────────────┼──────────────┼──────────────┤
- │ │🄸 No Changes │ │ │ │
- │ (None, Some) │ OR │ ø │🄼 Added │🄽 Merged │
+ │ │🄸 No Changes │ │ │ 🄽 Touched │
+ │ (None, Some) │ OR │ ø │🄼 Added │OR 🅀 Salvaged │
│ │🄹 Salvaged[2]│ │ (copied?) │ (copied?) │
├──────────────┼──────────────┼──────────────┼──────────────┼──────────────┤
- │ │ │ │ │ │
- │ (Some, Some) │🄺 No Changes │ ø │🄾 Merged │🄿 Merged │
+ │ │ │ │ 🄾 Touched │ 🄿 Merged │
+ │ (Some, Some) │🄺 No Changes │ ø │OR 🅁 Salvaged │OR 🅂 Touched │
│ │ [3] │ │ (copied?) │ (copied?) │
└──────────────┴──────────────┴──────────────┴──────────────┴──────────────┘
@@ -414,6 +415,7 @@
nice bonus. However do not any of this yet.
"""
+ repo = ctx.repo()
md = ChangingFiles()
m = ctx.manifest()
@@ -453,8 +455,23 @@
# case 🄻 — both deleted the file.
md.mark_removed(filename)
elif d1[1][0] is not None and d2[1][0] is not None:
- # case 🄽 🄾 🄿
- md.mark_merged(filename)
+ if d1[0][0] is None or d2[0][0] is None:
+ if any(_find(ma, filename) is not None for ma in mas):
+ # case 🅀 or 🅁
+ md.mark_salvaged(filename)
+ else:
+ # case 🄽 🄾 : touched
+ md.mark_touched(filename)
+ else:
+ fctx = repo.filectx(filename, fileid=d1[1][0])
+ if fctx.p2().rev() == nullrev:
+ # case 🅂
+ # lets assume we can trust the file history. If the
+ # filenode is not a merge, the file was not merged.
+ md.mark_touched(filename)
+ else:
+ # case 🄿
+ md.mark_merged(filename)
copy_candidates.append(filename)
else:
# Impossible case, the post-merge file status cannot be None on
@@ -804,6 +821,21 @@
return encode_files_sidedata(files), files.has_copies_info
+def copies_sidedata_computer(repo, revlog, rev, existing_sidedata):
+ return _getsidedata(repo, rev)[0]
+
+
+def set_sidedata_spec_for_repo(repo):
+ if requirementsmod.COPIESSDC_REQUIREMENT in repo.requirements:
+ repo.register_wanted_sidedata(sidedatamod.SD_FILES)
+ repo.register_sidedata_computer(
+ b"changelog",
+ sidedatamod.SD_FILES,
+ (sidedatamod.SD_FILES,),
+ copies_sidedata_computer,
+ )
+
+
def getsidedataadder(srcrepo, destrepo):
use_w = srcrepo.ui.configbool(b'experimental', b'worker.repository-upgrade')
if pycompat.iswindows or not use_w:
@@ -882,14 +914,14 @@
data = {}, False
if util.safehasattr(revlog, b'filteredrevs'): # this is a changelog
# Is the data previously shelved ?
- sidedata = staging.pop(rev, None)
- if sidedata is None:
+ data = staging.pop(rev, None)
+ if data is None:
# look at the queued result until we find the one we are lookig
# for (shelve the other ones)
r, data = sidedataq.get()
while r != rev:
staging[r] = data
- r, sidedata = sidedataq.get()
+ r, data = sidedataq.get()
tokens.release()
sidedata, has_copies_info = data
new_flag = 0
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/minirst.py
--- a/mercurial/minirst.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/minirst.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# minirst.py - minimal reStructuredText parser
#
-# Copyright 2009, 2010 Matt Mackall and others
+# Copyright 2009, 2010 Olivia Mackall and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -158,7 +158,7 @@
_optionre = re.compile(
br'^(-([a-zA-Z0-9]), )?(--[a-z0-9-]+)' br'((.*) +)(.*)$'
)
-_fieldre = re.compile(br':(?![: ])([^:]*)(?
+ Copyright 2005, 2006 Olivia Mackall
This software may be used and distributed according to the terms
of the GNU General Public License, incorporated herein by reference.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/narrowspec.py
--- a/mercurial/narrowspec.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/narrowspec.py Tue Apr 20 11:01:06 2021 -0400
@@ -329,7 +329,6 @@
trackeddirty = status.modified + status.added
clean = status.clean
if assumeclean:
- assert not trackeddirty
clean.extend(lookup)
else:
trackeddirty.extend(lookup)
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/node.py
--- a/mercurial/node.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/node.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# node.py - basic nodeid manipulation for mercurial
#
-# Copyright 2005, 2006 Matt Mackall
+# Copyright 2005, 2006 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -21,29 +21,48 @@
raise TypeError(e)
-nullrev = -1
-# In hex, this is '0000000000000000000000000000000000000000'
-nullid = b"\0" * 20
-nullhex = hex(nullid)
+def short(node):
+ return hex(node[:6])
+
-# Phony node value to stand-in for new files in some uses of
-# manifests.
-# In hex, this is '2121212121212121212121212121212121212121'
-newnodeid = b'!!!!!!!!!!!!!!!!!!!!'
-# In hex, this is '3030303030303030303030303030306164646564'
-addednodeid = b'000000000000000added'
-# In hex, this is '3030303030303030303030306d6f646966696564'
-modifiednodeid = b'000000000000modified'
+nullrev = -1
-wdirfilenodeids = {newnodeid, addednodeid, modifiednodeid}
-
-# pseudo identifiers for working directory
-# (they are experimental, so don't add too many dependencies on them)
+# pseudo identifier for working directory
+# (experimental, so don't add too many dependencies on it)
wdirrev = 0x7FFFFFFF
-# In hex, this is 'ffffffffffffffffffffffffffffffffffffffff'
-wdirid = b"\xff" * 20
-wdirhex = hex(wdirid)
-def short(node):
- return hex(node[:6])
+class sha1nodeconstants(object):
+ nodelen = 20
+
+ # In hex, this is '0000000000000000000000000000000000000000'
+ nullid = b"\0" * nodelen
+ nullhex = hex(nullid)
+
+ # Phony node value to stand-in for new files in some uses of
+ # manifests.
+ # In hex, this is '2121212121212121212121212121212121212121'
+ newnodeid = b'!!!!!!!!!!!!!!!!!!!!'
+ # In hex, this is '3030303030303030303030303030306164646564'
+ addednodeid = b'000000000000000added'
+ # In hex, this is '3030303030303030303030306d6f646966696564'
+ modifiednodeid = b'000000000000modified'
+
+ wdirfilenodeids = {newnodeid, addednodeid, modifiednodeid}
+
+ # pseudo identifier for working directory
+ # (experimental, so don't add too many dependencies on it)
+ # In hex, this is 'ffffffffffffffffffffffffffffffffffffffff'
+ wdirid = b"\xff" * nodelen
+ wdirhex = hex(wdirid)
+
+
+# legacy starting point for porting modules
+nullid = sha1nodeconstants.nullid
+nullhex = sha1nodeconstants.nullhex
+newnodeid = sha1nodeconstants.newnodeid
+addednodeid = sha1nodeconstants.addednodeid
+modifiednodeid = sha1nodeconstants.modifiednodeid
+wdirfilenodeids = sha1nodeconstants.wdirfilenodeids
+wdirid = sha1nodeconstants.wdirid
+wdirhex = sha1nodeconstants.wdirhex
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/obsolete.py
--- a/mercurial/obsolete.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/obsolete.py Tue Apr 20 11:01:06 2021 -0400
@@ -560,10 +560,11 @@
# parents: (tuple of nodeid) or None, parents of predecessors
# None is used when no data has been recorded
- def __init__(self, svfs, defaultformat=_fm1version, readonly=False):
+ def __init__(self, repo, svfs, defaultformat=_fm1version, readonly=False):
# caches for various obsolescence related cache
self.caches = {}
self.svfs = svfs
+ self.repo = repo
self._defaultformat = defaultformat
self._readonly = readonly
@@ -806,7 +807,7 @@
if defaultformat is not None:
kwargs['defaultformat'] = defaultformat
readonly = not isenabled(repo, createmarkersopt)
- store = obsstore(repo.svfs, readonly=readonly, **kwargs)
+ store = obsstore(repo, repo.svfs, readonly=readonly, **kwargs)
if store and readonly:
ui.warn(
_(b'obsolete feature not enabled but %i markers found!\n')
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/parser.py
--- a/mercurial/parser.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/parser.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# parser.py - simple top-down operator precedence parser for mercurial
#
-# Copyright 2010 Matt Mackall
+# Copyright 2010 Olivia Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/pathutil.py
--- a/mercurial/pathutil.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/pathutil.py Tue Apr 20 11:01:06 2021 -0400
@@ -15,11 +15,21 @@
util,
)
+if pycompat.TYPE_CHECKING:
+ from typing import (
+ Any,
+ Callable,
+ Iterator,
+ Optional,
+ )
+
+
rustdirs = policy.importrust('dirstate', 'Dirs')
parsers = policy.importmod('parsers')
def _lowerclean(s):
+ # type: (bytes) -> bytes
return encoding.hfsignoreclean(s.lower())
@@ -59,6 +69,7 @@
self.normcase = lambda x: x
def __call__(self, path, mode=None):
+ # type: (bytes, Optional[Any]) -> None
"""Check the relative path.
path may contain a pattern (e.g. foodir/**.txt)"""
@@ -119,6 +130,7 @@
self.audited.add(normpath)
def _checkfs(self, prefix, path):
+ # type: (bytes, bytes) -> None
"""raise exception if a file system backed check fails"""
curpath = os.path.join(self.root, prefix)
try:
@@ -143,6 +155,7 @@
raise error.Abort(msg % (path, pycompat.bytestr(prefix)))
def check(self, path):
+ # type: (bytes) -> bool
try:
self(path)
return True
@@ -164,6 +177,7 @@
def canonpath(root, cwd, myname, auditor=None):
+ # type: (bytes, bytes, bytes, Optional[pathauditor]) -> bytes
"""return the canonical path of myname, given cwd and root
>>> def check(root, cwd, myname):
@@ -266,6 +280,7 @@
def normasprefix(path):
+ # type: (bytes) -> bytes
"""normalize the specified path as path prefix
Returned value can be used safely for "p.startswith(prefix)",
@@ -289,6 +304,7 @@
def finddirs(path):
+ # type: (bytes) -> Iterator[bytes]
pos = path.rfind(b'/')
while pos != -1:
yield path[:pos]
@@ -318,6 +334,7 @@
addpath(f)
def addpath(self, path):
+ # type: (bytes) -> None
dirs = self._dirs
for base in finddirs(path):
if base.endswith(b'/'):
@@ -330,6 +347,7 @@
dirs[base] = 1
def delpath(self, path):
+ # type: (bytes) -> None
dirs = self._dirs
for base in finddirs(path):
if dirs[base] > 1:
@@ -341,6 +359,7 @@
return iter(self._dirs)
def __contains__(self, d):
+ # type: (bytes) -> bool
return d in self._dirs
@@ -355,4 +374,4 @@
# rather not let our internals know that we're thinking in posix terms
# - instead we'll let them be oblivious.
join = posixpath.join
-dirname = posixpath.dirname
+dirname = posixpath.dirname # type: Callable[[bytes], bytes]
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/phases.py
--- a/mercurial/phases.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/phases.py Tue Apr 20 11:01:06 2021 -0400
@@ -127,10 +127,32 @@
util,
)
+if pycompat.TYPE_CHECKING:
+ from typing import (
+ Any,
+ Callable,
+ Dict,
+ Iterable,
+ List,
+ Optional,
+ Set,
+ Tuple,
+ )
+ from . import (
+ localrepo,
+ ui as uimod,
+ )
+
+ Phaseroots = Dict[int, Set[bytes]]
+ Phasedefaults = List[
+ Callable[[localrepo.localrepository, Phaseroots], Phaseroots]
+ ]
+
+
_fphasesentry = struct.Struct(b'>i20s')
# record phase index
-public, draft, secret = range(3)
+public, draft, secret = range(3) # type: int
archived = 32 # non-continuous for compatibility
internal = 96 # non-continuous for compatibility
allphases = (public, draft, secret, archived, internal)
@@ -154,11 +176,13 @@
def supportinternal(repo):
+ # type: (localrepo.localrepository) -> bool
"""True if the internal phase can be used on a repository"""
return requirements.INTERNAL_PHASE_REQUIREMENT in repo.requirements
def _readroots(repo, phasedefaults=None):
+ # type: (localrepo.localrepository, Optional[Phasedefaults]) -> Tuple[Phaseroots, bool]
"""Read phase roots from disk
phasedefaults is a list of fn(repo, roots) callable, which are
@@ -191,6 +215,7 @@
def binaryencode(phasemapping):
+ # type: (Dict[int, List[bytes]]) -> bytes
"""encode a 'phase -> nodes' mapping into a binary stream
The revision lists are encoded as (phase, root) pairs.
@@ -203,6 +228,7 @@
def binarydecode(stream):
+ # type: (...) -> Dict[int, List[bytes]]
"""decode a binary stream into a 'phase -> nodes' mapping
The (phase, root) pairs are turned back into a dictionary with
@@ -321,6 +347,7 @@
class phasecache(object):
def __init__(self, repo, phasedefaults, _load=True):
+ # type: (localrepo.localrepository, Optional[Phasedefaults], bool) -> None
if _load:
# Cheap trick to allow shallow-copy without copy module
self.phaseroots, self.dirty = _readroots(repo, phasedefaults)
@@ -330,6 +357,7 @@
self.opener = repo.svfs
def hasnonpublicphases(self, repo):
+ # type: (localrepo.localrepository) -> bool
"""detect if there are revisions with non-public phase"""
repo = repo.unfiltered()
cl = repo.changelog
@@ -343,6 +371,7 @@
)
def nonpublicphaseroots(self, repo):
+ # type: (localrepo.localrepository) -> Set[bytes]
"""returns the roots of all non-public phases
The roots are not minimized, so if the secret revisions are
@@ -362,6 +391,8 @@
)
def getrevset(self, repo, phases, subset=None):
+ # type: (localrepo.localrepository, Iterable[int], Optional[Any]) -> Any
+ # TODO: finish typing this
"""return a smartset for the given phases"""
self.loadphaserevs(repo) # ensure phase's sets are loaded
phases = set(phases)
@@ -457,6 +488,7 @@
self._loadedrevslen = len(cl)
def loadphaserevs(self, repo):
+ # type: (localrepo.localrepository) -> None
"""ensure phase information is loaded in the object"""
if self._phasesets is None:
try:
@@ -470,6 +502,7 @@
self._phasesets = None
def phase(self, repo, rev):
+ # type: (localrepo.localrepository, int) -> int
# We need a repo argument here to be able to build _phasesets
# if necessary. The repository instance is not stored in
# phasecache to avoid reference cycles. The changelog instance
@@ -652,6 +685,7 @@
return False
def filterunknown(self, repo):
+ # type: (localrepo.localrepository) -> None
"""remove unknown nodes from the phase boundary
Nothing is lost as unknown nodes only hold data for their descendants.
@@ -729,6 +763,7 @@
def listphases(repo):
+ # type: (localrepo.localrepository) -> Dict[bytes, bytes]
"""List phases root for serialization over pushkey"""
# Use ordered dictionary so behavior is deterministic.
keys = util.sortdict()
@@ -760,6 +795,7 @@
def pushphase(repo, nhex, oldphasestr, newphasestr):
+ # type: (localrepo.localrepository, bytes, bytes, bytes) -> bool
"""List phases root for serialization over pushkey"""
repo = repo.unfiltered()
with repo.lock():
@@ -909,6 +945,7 @@
def newcommitphase(ui):
+ # type: (uimod.ui) -> int
"""helper to get the target phase of new commit
Handle all possible values for the phases.new-commit options.
@@ -924,11 +961,13 @@
def hassecret(repo):
+ # type: (localrepo.localrepository) -> bool
"""utility function that check if a repo have any secret changeset."""
return bool(repo._phasecache.phaseroots[secret])
def preparehookargs(node, old, new):
+ # type: (bytes, Optional[int], Optional[int]) -> Dict[bytes, bytes]
if old is None:
old = b''
else:
diff -r bc268ea9f984 -r f67b8946bb1b mercurial/posix.py
--- a/mercurial/posix.py Thu Mar 25 19:06:28 2021 -0400
+++ b/mercurial/posix.py Tue Apr 20 11:01:06 2021 -0400
@@ -1,6 +1,6 @@
# posix.py - Posix utility function implementations for Mercurial
#
-# Copyright 2005-2009 Matt Mackall and others
+# Copyright 2005-2009 Olivia Mackall and others