--- a/contrib/buildrpm Sat Jul 21 17:36:45 2007 +0200
+++ b/contrib/buildrpm Sat Jul 21 17:37:39 2007 +0200
@@ -29,7 +29,7 @@
version=`hg tags | perl -e 'while(<STDIN>){if(/^(\d\S+)/){print$1;exit}}'`
# Compute the release number as the difference in revision numbers
# between the tip and the most recent tag.
-release=`hg tags | perl -e 'while(<STDIN>){/^(\S+)\s+(\d+)/;if($1eq"tip"){$t=$2}else{print$t-$2+1;exit}}'`
+release=`hg tags | perl -e 'while(<STDIN>){($tag,$id)=/^(\S+)\s+(\d+)/;if($tag eq "tip"){$tip = $id}elsif($tag=~/^\d/){print $tip-$id+1;exit}}'`
tip=`hg -q tip`
# Beat up the spec file
@@ -40,6 +40,19 @@
-e 's,^%setup.*,,' \
$specfile > $tmpspec
+cat <<EOF >> $tmpspec
+%changelog
+* `date +'%a %b %d %Y'` `hg showconfig ui.username` $version-$release
+- Automatically built via $0
+
+EOF
+hg log \
+ --template '* {date|rfc822date} {author}\n- {desc|firstline}\n\n' \
+ .hgtags \
+ | sed -e 's/^\(\* [MTWFS][a-z][a-z]\), \([0-3][0-9]\) \([A-Z][a-z][a-z]\) /\1 \3 \2 /' \
+ -e '/^\* [MTWFS][a-z][a-z] /{s/ [012][0-9]:[0-9][0-9]:[0-9][0-9] [+-][0-9]\{4\}//}' \
+ >> $tmpspec
+
rpmbuild --define "_topdir $rpmdir" -bb $tmpspec
if [ $? = 0 ]; then
rm -rf $tmpspec $rpmdir/BUILD
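
The release number above is the distance, in revision numbers, from the most recent version tag to tip. A minimal Python sketch of the same calculation, assuming `hg tags` prints one `NAME REV:NODE` line per tag with tip listed first (the helper below is illustrative, not part of the script):

    import os, re

    def release_number(tags_output):
        '''distance from the newest version tag to tip, plus one'''
        tip_rev = None
        for line in tags_output.splitlines():
            m = re.match(r'(\S+)\s+(\d+):', line)
            if not m:
                continue
            name, rev = m.group(1), int(m.group(2))
            if name == 'tip':
                tip_rev = rev
            elif name[0].isdigit() and tip_rev is not None:
                # the first version tag after tip is the most recent one
                return tip_rev - rev + 1
        return 1    # no version tag yet

    print(release_number(os.popen('hg tags').read()))
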
--- a/contrib/churn.py Sat Jul 21 17:36:45 2007 +0200
+++ b/contrib/churn.py Sat Jul 21 17:37:39 2007 +0200
@@ -11,9 +11,34 @@
#
# <alias email> <actual email>
-import sys
from mercurial.i18n import gettext as _
from mercurial import hg, mdiff, cmdutil, ui, util, templater, node
+import os, sys
+
+def get_tty_width():
+ if 'COLUMNS' in os.environ:
+ try:
+ return int(os.environ['COLUMNS'])
+ except ValueError:
+ pass
+ try:
+ import termios, fcntl, struct
+ buf = 'abcd'
+ for dev in (sys.stdout, sys.stdin):
+ try:
+ if buf != 'abcd':
+ break
+ fd = dev.fileno()
+ if not os.isatty(fd):
+ continue
+ buf = fcntl.ioctl(fd, termios.TIOCGWINSZ, buf)
+ except ValueError:
+ pass
+ if buf != 'abcd':
+ return struct.unpack('hh', buf)[1]
+ except ImportError:
+ pass
+ return 80
def __gather(ui, repo, node1, node2):
def dirtywork(f, mmap1, mmap2):
@@ -159,8 +184,9 @@
maximum = ordered[0][1]
- ui.note("Assuming 80 character terminal\n")
- width = 80 - 1
+ width = get_tty_width()
+ ui.note(_("assuming %i character terminal\n") % width)
+ width -= 1
for i in ordered:
person = i[0]
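
get_tty_width() prefers $COLUMNS, then asks the terminal via the TIOCGWINSZ ioctl (which packs rows and columns as two shorts, hence the unpack('hh', ...)[1]), and finally falls back to 80. For comparison only, modern Python (3.3 and later) ships the same logic in the standard library; this is not what the extension itself uses:

    import shutil

    def get_tty_width():
        # honours $COLUMNS, then queries the terminal, then falls back to 80
        return shutil.get_terminal_size(fallback=(80, 24)).columns
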
--- a/contrib/macosx/Readme.html Sat Jul 21 17:36:45 2007 +0200
+++ b/contrib/macosx/Readme.html Sat Jul 21 17:37:39 2007 +0200
@@ -19,10 +19,14 @@
<p class="p2"><br></p>
<p class="p3">This is <i>not</i> a stand-alone version of Mercurial.</p>
<p class="p2"><br></p>
-<p class="p3">To use it, you must have the Universal MacPython 2.4.3 from <a href="http://www.python.org">www.python.org</a> installed.</p>
+<p class="p3">To use it, you must have the appropriate version of Universal MacPython from <a href="http://www.python.org">www.python.org</a> installed.</p>
<p class="p2"><br></p>
-<p class="p3">You can download MacPython 2.4.3 from here:</p>
-<p class="p4"><span class="s1"><a href="http://www.python.org/ftp/python/2.4.3/Universal-MacPython-2.4.3-2006-04-07.dmg">http://www.python.org/ftp/python/2.4.3/Universal-MacPython-2.4.3-2006-04-07.dmg</a></span></p>
+<p class="p3">You can find more information and download MacPython from here:</p>
+<p class="p4"><span class="s1"><a href="http://www.python.org/download">http://www.python.org/download</a></span></p>
+<p class="p2"><br></p>
+<p class="p3">Or direct links to the latest version are:</p>
+<p class="p4"><span class="s1"><a href="http://www.python.org/ftp/python/2.5.1/python-2.5.1-macosx.dmg">Python 2.5.1 for Macintosh OS X</a></span></p>
+<p class="p4"><span class="s1"><a href="http://www.python.org/ftp/python/2.4.4/python-2.4.4-macosx2006-10-18.dmg">Python 2.4.4 for Macintosh OS X</a></span></p>
<p class="p2"><br></p>
<p class="p1"><b>After you install</b></p>
<p class="p2"><br></p>
--- a/contrib/mercurial.el Sat Jul 21 17:36:45 2007 +0200
+++ b/contrib/mercurial.el Sat Jul 21 17:37:39 2007 +0200
@@ -1261,9 +1261,22 @@
(interactive)
(error "not implemented"))
-(defun hg-version-other-window ()
- (interactive)
- (error "not implemented"))
+(defun hg-version-other-window (rev)
+ "Visit version REV of the current file in another window.
+If the current file is named `F', the version is named `F.~REV~'.
+If `F.~REV~' already exists, use it instead of checking it out again."
+ (interactive "sVersion to visit (default is workfile version): ")
+ (let* ((file buffer-file-name)
+ (version (if (string-equal rev "")
+ "tip"
+ rev))
+ (automatic-backup (vc-version-backup-file-name file version))
+ (manual-backup (vc-version-backup-file-name file version 'manual)))
+ (unless (file-exists-p manual-backup)
+ (if (file-exists-p automatic-backup)
+ (rename-file automatic-backup manual-backup nil)
+ (hg-run0 "-q" "cat" "-r" version "-o" manual-backup file)))
+ (find-file-other-window manual-backup)))
(provide 'mercurial)
--- a/contrib/mercurial.spec Sat Jul 21 17:36:45 2007 +0200
+++ b/contrib/mercurial.spec Sat Jul 21 17:37:39 2007 +0200
@@ -8,6 +8,17 @@
URL: http://www.selenic.com/mercurial
BuildRoot: /tmp/build.%{name}-%{version}-%{release}
+# From the README:
+#
+# Note: some distributions fails to include bits of distutils by
+# default, you'll need python-dev to install. You'll also need a C
+# compiler and a 3-way merge tool like merge, tkdiff, or kdiff3.
+#
+# python-devel provides an adequate python-dev. The merge tool is a
+# run-time dependency.
+#
+BuildRequires: python >= 2.3, python-devel, make, gcc, asciidoc, xmlto
+
%define pythonver %(python -c 'import sys;print ".".join(map(str, sys.version_info[:2]))')
%define pythonlib %{_libdir}/python%{pythonver}/site-packages/%{name}
%define hgext %{_libdir}/python%{pythonver}/site-packages/hgext
@@ -21,23 +32,51 @@
%setup -q
%build
-python setup.py build
+make all
%install
-python setup.py install --root $RPM_BUILD_ROOT
+python setup.py install --root $RPM_BUILD_ROOT --prefix %{_prefix}
+make install-doc DESTDIR=$RPM_BUILD_ROOT MANDIR=%{_mandir}
+
+install contrib/hgk $RPM_BUILD_ROOT%{_bindir}
+install contrib/convert-repo $RPM_BUILD_ROOT%{_bindir}/mercurial-convert-repo
+install contrib/hg-ssh $RPM_BUILD_ROOT%{_bindir}
+install contrib/git-viz/{hg-viz,git-rev-tree} $RPM_BUILD_ROOT%{_bindir}
+
+bash_completion_dir=$RPM_BUILD_ROOT%{_sysconfdir}/bash_completion.d
+mkdir -p $bash_completion_dir
+install contrib/bash_completion $bash_completion_dir/mercurial.sh
+
+zsh_completion_dir=$RPM_BUILD_ROOT%{_datadir}/zsh/site-functions
+mkdir -p $zsh_completion_dir
+install contrib/zsh_completion $zsh_completion_dir/_mercurial
+
+lisp_dir=$RPM_BUILD_ROOT%{_datadir}/emacs/site-lisp
+mkdir -p $lisp_dir
+install contrib/mercurial.el $lisp_dir
%clean
rm -rf $RPM_BUILD_ROOT
%files
%defattr(-,root,root,-)
-%doc doc/* *.cgi
+%doc CONTRIBUTORS COPYING doc/README doc/hg*.txt doc/hg*.html doc/ja *.cgi
+%{_mandir}/man?/hg*.gz
%dir %{pythonlib}
%dir %{hgext}
+%{_sysconfdir}/bash_completion.d/mercurial.sh
+%{_datadir}/zsh/site-functions/_mercurial
+%{_datadir}/emacs/site-lisp/mercurial.el
+%{_bindir}/hg
+%{_bindir}/hgk
%{_bindir}/hgmerge
-%{_bindir}/hg
+%{_bindir}/hg-ssh
+%{_bindir}/hg-viz
+%{_bindir}/git-rev-tree
+%{_bindir}/mercurial-convert-repo
%{pythonlib}/templates
%{pythonlib}/*.py*
%{pythonlib}/hgweb/*.py*
%{pythonlib}/*.so
%{hgext}/*.py*
+%{hgext}/convert/*.py*
--- a/contrib/win32/mercurial.ini Sat Jul 21 17:36:45 2007 +0200
+++ b/contrib/win32/mercurial.ini Sat Jul 21 17:37:39 2007 +0200
@@ -1,41 +1,41 @@
-; System-wide Mercurial config file. To override these settings on a
-; per-user basis, please edit the following file instead, where
-; USERNAME is your Windows user name:
-; C:\Documents and Settings\USERNAME\Mercurial.ini
-
-[ui]
-editor = notepad
-
-; By default, we try to encode and decode all files that do not
-; contain ASCII NUL characters. What this means is that we try to set
-; line endings to Windows style on update, and to Unix style on
-; commit. This lets us cooperate with Linux and Unix users, so
-; everybody sees files with their native line endings.
-
-[extensions]
-; The win32text extension is available and installed by default. It
-; provides built-in Python hooks to perform line ending conversions.
-; This is normally much faster than running an external program.
-hgext.win32text =
-
-
-[encode]
-; Encode files that don't contain NUL characters.
-
-; ** = cleverencode:
-
-; Alternatively, you can explicitly specify each file extension that
-; you want encoded (any you omit will be left untouched), like this:
-
-; *.txt = dumbencode:
-
-
-[decode]
-; Decode files that don't contain NUL characters.
-
-; ** = cleverdecode:
-
-; Alternatively, you can explicitly specify each file extension that
-; you want decoded (any you omit will be left untouched), like this:
-
-; **.txt = dumbdecode:
+; System-wide Mercurial config file. To override these settings on a
+; per-user basis, please edit the following file instead, where
+; USERNAME is your Windows user name:
+; C:\Documents and Settings\USERNAME\Mercurial.ini
+
+[ui]
+editor = notepad
+
+; By default, we try to encode and decode all files that do not
+; contain ASCII NUL characters. What this means is that we try to set
+; line endings to Windows style on update, and to Unix style on
+; commit. This lets us cooperate with Linux and Unix users, so
+; everybody sees files with their native line endings.
+
+[extensions]
+; The win32text extension is available and installed by default. It
+; provides built-in Python hooks to perform line ending conversions.
+; This is normally much faster than running an external program.
+hgext.win32text =
+
+
+[encode]
+; Encode files that don't contain NUL characters.
+
+; ** = cleverencode:
+
+; Alternatively, you can explicitly specify each file extension that
+; you want encoded (any you omit will be left untouched), like this:
+
+; *.txt = dumbencode:
+
+
+[decode]
+; Decode files that don't contain NUL characters.
+
+; ** = cleverdecode:
+
+; Alternatively, you can explicitly specify each file extension that
+; you want decoded (any you omit will be left untouched), like this:
+
+; **.txt = dumbdecode:
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/alias.py Sat Jul 21 17:37:39 2007 +0200
@@ -0,0 +1,77 @@
+# Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
+# This file is published under the GNU GPL.
+
+'''allow user-defined command aliases
+
+To use, create entries in your hgrc of the form
+
+[alias]
+mycmd = cmd --args
+'''
+
+from mercurial.cmdutil import findcmd, UnknownCommand, AmbiguousCommand
+from mercurial import commands
+
+cmdtable = {}
+
+class RecursiveCommand(Exception): pass
+
+class lazycommand(object):
+    '''defer command lookup until needed, so that commands from
+    extensions loaded after alias can still be aliased'''
+ def __init__(self, ui, name, target):
+ self._ui = ui
+ self._name = name
+ self._target = target
+ self._cmd = None
+
+ def __len__(self):
+ self._resolve()
+ return len(self._cmd)
+
+ def __getitem__(self, key):
+ self._resolve()
+ return self._cmd[key]
+
+ def __iter__(self):
+ self._resolve()
+ return self._cmd.__iter__()
+
+ def _resolve(self):
+ if self._cmd is not None:
+ return
+
+ try:
+ self._cmd = findcmd(self._ui, self._target)[1]
+ if self._cmd == self:
+ raise RecursiveCommand()
+ if self._target in commands.norepo.split(' '):
+ commands.norepo += ' %s' % self._name
+ return
+ except UnknownCommand:
+ msg = '*** [alias] %s: command %s is unknown' % \
+ (self._name, self._target)
+ except AmbiguousCommand:
+ msg = '*** [alias] %s: command %s is ambiguous' % \
+ (self._name, self._target)
+ except RecursiveCommand:
+ msg = '*** [alias] %s: circular dependency on %s' % \
+ (self._name, self._target)
+ def nocmd(*args, **opts):
+ self._ui.warn(msg + '\n')
+ return 1
+ nocmd.__doc__ = msg
+ self._cmd = (nocmd, [], '')
+ commands.norepo += ' %s' % self._name
+
+def uisetup(ui):
+ for cmd, target in ui.configitems('alias'):
+ if not target:
+ ui.warn('*** [alias] %s: no definition\n' % cmd)
+ continue
+ args = target.split(' ')
+ tcmd = args.pop(0)
+ if args:
+ pui = ui.parentui or ui
+ pui.setconfig('defaults', cmd, ' '.join(args))
+ cmdtable[cmd] = lazycommand(ui, cmd, tcmd)
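
lazycommand postpones the findcmd() lookup until the command-table entry is first indexed or iterated, which is what lets an alias point at a command from an extension that loads after alias itself. The deferral pattern in isolation, as a small sketch with made-up names:

    class lazyentry(object):
        '''stand-in that behaves like the (func, options, synopsis) tuple
        it wraps, but only performs the lookup on first access'''
        def __init__(self, resolve):
            self._resolve = resolve
            self._value = None

        def _force(self):
            if self._value is None:
                self._value = self._resolve()
            return self._value

        def __len__(self):
            return len(self._force())

        def __getitem__(self, key):
            return self._force()[key]

        def __iter__(self):
            return iter(self._force())

    entry = lazyentry(lambda: (lambda *a, **kw: 0, [], 'hg mycmd'))
    print(len(entry))   # the lookup happens here; prints 3
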
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/children.py Sat Jul 21 17:37:39 2007 +0200
@@ -0,0 +1,41 @@
+# Mercurial extension to provide the 'hg children' command
+#
+# Copyright 2007 by Intevation GmbH <intevation@intevation.de>
+# Author(s):
+# Thomas Arendsen Hein <thomas@intevation.de>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from mercurial import cmdutil
+from mercurial.i18n import _
+
+
+def children(ui, repo, file_=None, **opts):
+ """show the children of the given or working dir revision
+
+    Print the children of the working directory's revision.
+    If a revision is given via --rev, the children of that revision
+    will be printed. If a file argument is given, the revision in
+    which the file was last changed (after the working directory
+    revision or the argument to --rev, if given) is printed.
+ """
+ rev = opts.get('rev')
+ if file_:
+ ctx = repo.filectx(file_, changeid=rev)
+ else:
+ ctx = repo.changectx(rev)
+
+ displayer = cmdutil.show_changeset(ui, repo, opts)
+ for node in [cp.node() for cp in ctx.children()]:
+ displayer.show(changenode=node)
+
+
+cmdtable = {
+ "children":
+ (children,
+ [('r', 'rev', '', _('show children of the specified rev')),
+ ('', 'style', '', _('display using template map file')),
+ ('', 'template', '', _('display with template'))],
+ _('hg children [-r REV] [FILE]')),
+}
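
The command is a thin wrapper around ctx.children(). A sketch of the same lookup against the Mercurial API used above, without the templated output (the repository path and revision are illustrative):

    from mercurial import hg, ui

    def print_children(path, rev):
        # rev is anything hg understands: a number, tag or hash
        repo = hg.repository(ui.ui(), path)
        for child in repo.changectx(rev).children():
            print('%d:%s' % (child.rev(), str(child)))

    # print_children('.', 'tip')
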
--- a/hgext/convert/__init__.py Sat Jul 21 17:36:45 2007 +0200
+++ b/hgext/convert/__init__.py Sat Jul 21 17:37:39 2007 +0200
@@ -5,27 +5,40 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from common import NoRepo
+from common import NoRepo, converter_source, converter_sink
from cvs import convert_cvs
from git import convert_git
from hg import convert_mercurial
+from subversion import convert_svn
-import os
+import os, shutil
from mercurial import hg, ui, util, commands
commands.norepo += " convert"
-converters = [convert_cvs, convert_git, convert_mercurial]
+converters = [convert_cvs, convert_git, convert_svn, convert_mercurial]
-def converter(ui, path):
+def convertsource(ui, path, **opts):
+ for c in converters:
+ if not hasattr(c, 'getcommit'):
+ continue
+ try:
+ return c(ui, path, **opts)
+ except NoRepo:
+ pass
+ raise util.Abort('%s: unknown repository type' % path)
+
+def convertsink(ui, path):
if not os.path.isdir(path):
raise util.Abort("%s: not a directory" % path)
for c in converters:
+ if not hasattr(c, 'putcommit'):
+ continue
try:
return c(ui, path)
except NoRepo:
pass
- raise util.Abort("%s: unknown repository type" % path)
+ raise util.Abort('%s: unknown repository type' % path)
class convert(object):
def __init__(self, ui, source, dest, mapfile, opts):
@@ -180,6 +193,8 @@
c = self.commitcache[rev]
files = self.source.getchanges(rev)
+ do_copies = (hasattr(c, 'copies') and hasattr(self.dest, 'copyfile'))
+
for f, v in files:
try:
data = self.source.getfile(f, v)
@@ -188,6 +203,11 @@
else:
e = self.source.getmode(f, v)
self.dest.putfile(f, e, data)
+ if do_copies:
+ if f in c.copies:
+ # Merely marks that a copy happened.
+ self.dest.copyfile(c.copies[f], f)
+
r = [self.map[v] for v in c.parents]
f = [f for f, v in files]
@@ -196,6 +216,7 @@
def convert(self):
try:
+ self.source.setrevmap(self.map)
self.ui.status("scanning source...\n")
heads = self.source.getheads()
parents = self.walktree(heads)
@@ -239,15 +260,20 @@
self.mapfilefd.close()
def _convert(ui, src, dest=None, mapfile=None, **opts):
- '''Convert a foreign SCM repository to a Mercurial one.
+ """Convert a foreign SCM repository to a Mercurial one.
Accepted source formats:
- GIT
- CVS
+ - SVN
Accepted destination formats:
- Mercurial
+ If no revision is given, all revisions will be converted. Otherwise,
+ convert will only import up to the named revision (given in a format
+ understood by the source).
+
If no destination directory name is specified, it defaults to the
basename of the source with '-hg' appended. If the destination
repository doesn't exist, it will be created.
@@ -267,19 +293,16 @@
that use unix logins to identify authors (eg: CVS). One line per author
mapping and the line format is:
srcauthor=whatever string you want
- '''
+ """
util._encoding = 'UTF-8'
- srcc = converter(ui, src)
- if not hasattr(srcc, "getcommit"):
- raise util.Abort("%s: can't read from this repo type" % src)
-
if not dest:
dest = hg.defaultdest(src) + "-hg"
ui.status("assuming destination %s\n" % dest)
# Try to be smart and initalize things when required
+ created = False
if os.path.isdir(dest):
if len(os.listdir(dest)) > 0:
try:
@@ -294,15 +317,22 @@
else:
ui.status("initializing destination %s repository\n" % dest)
hg.repository(ui, dest, create=True)
+ created = True
elif os.path.exists(dest):
raise util.Abort("destination %s exists and is not a directory" % dest)
else:
ui.status("initializing destination %s repository\n" % dest)
hg.repository(ui, dest, create=True)
+ created = True
- destc = converter(ui, dest)
- if not hasattr(destc, "putcommit"):
- raise util.Abort("%s: can't write to this repo type" % src)
+ destc = convertsink(ui, dest)
+
+ try:
+ srcc = convertsource(ui, src, rev=opts.get('rev'))
+ except Exception:
+ if created:
+ shutil.rmtree(dest, True)
+ raise
if not mapfile:
try:
@@ -317,6 +347,7 @@
"convert":
(_convert,
[('A', 'authors', '', 'username mapping filename'),
+ ('r', 'rev', '', 'import up to target revision REV'),
('', 'datesort', None, 'try to sort changesets by date')],
'hg convert [OPTION]... SOURCE [DEST [MAPFILE]]'),
}
--- a/hgext/convert/common.py Sat Jul 21 17:36:45 2007 +0200
+++ b/hgext/convert/common.py Sat Jul 21 17:37:39 2007 +0200
@@ -4,18 +4,31 @@
class commit(object):
def __init__(self, **parts):
+ self.rev = None
+ self.branch = None
+
for x in "author date desc parents".split():
if not x in parts:
raise util.Abort("commit missing field %s" % x)
self.__dict__.update(parts)
+ if not self.desc or self.desc.isspace():
+ self.desc = '*** empty log message ***'
class converter_source(object):
"""Conversion source interface"""
- def __init__(self, ui, path):
+ def __init__(self, ui, path, rev=None):
"""Initialize conversion source (or raise NoRepo("message")
exception if path is not a valid repository)"""
- raise NotImplementedError()
+ self.ui = ui
+ self.path = path
+ self.rev = rev
+
+ self.encoding = 'utf-8'
+
+ def setrevmap(self, revmap):
+ """set the map of already-converted revisions"""
+ pass
def getheads(self):
"""Return a list of this repository's heads"""
@@ -44,6 +57,18 @@
"""Return the tags as a dictionary of name: revision"""
raise NotImplementedError()
+ def recode(self, s, encoding=None):
+ if not encoding:
+ encoding = self.encoding or 'utf-8'
+
+ try:
+ return s.decode(encoding).encode("utf-8")
+ except:
+ try:
+ return s.decode("latin-1").encode("utf-8")
+ except:
+ return s.decode(encoding, "replace").encode("utf-8")
+
class converter_sink(object):
"""Conversion sink (target) interface"""
--- a/hgext/convert/cvs.py Sat Jul 21 17:36:45 2007 +0200
+++ b/hgext/convert/cvs.py Sat Jul 21 17:37:39 2007 +0200
@@ -6,9 +6,9 @@
from common import NoRepo, commit, converter_source
class convert_cvs(converter_source):
- def __init__(self, ui, path):
- self.path = path
- self.ui = ui
+ def __init__(self, ui, path, rev=None):
+ super(convert_cvs, self).__init__(ui, path, rev=rev)
+
cvs = os.path.join(path, "CVS")
if not os.path.exists(cvs):
raise NoRepo("couldn't open CVS repo %s" % path)
@@ -29,15 +29,32 @@
if self.changeset:
return
+ maxrev = 0
+ cmd = 'cvsps -A -u --cvs-direct -q'
+ if self.rev:
+ # TODO: handle tags
+ try:
+ # patchset number?
+ maxrev = int(self.rev)
+ except ValueError:
+ try:
+ # date
+ util.parsedate(self.rev, ['%Y/%m/%d %H:%M:%S'])
+ cmd = "%s -d '1970/01/01 00:00:01' -d '%s'" % (cmd, self.rev)
+ except util.Abort:
+ raise util.Abort('revision %s is not a patchset number or date' % self.rev)
+
d = os.getcwd()
try:
os.chdir(self.path)
id = None
state = 0
- for l in os.popen("cvsps -A -u --cvs-direct -q"):
+ for l in os.popen(cmd):
if state == 0: # header
if l.startswith("PatchSet"):
id = l[9:-2]
+ if maxrev and int(id) > maxrev:
+ state = 3
elif l.startswith("Date"):
date = util.parsedate(l[6:-1], ["%Y/%m/%d %H:%M:%S"])
date = util.datestr(date)
@@ -62,8 +79,6 @@
if l == "Members: \n":
files = {}
log = self.recode(log[:-1])
- if log.isspace():
- log = "*** empty log message ***\n"
state = 2
else:
log += l
@@ -85,6 +100,8 @@
rev = l[colon+1:-2]
rev = rev.split("->")[1]
files[file] = rev
+ elif state == 3:
+ continue
self.heads = self.lastbranch.values()
finally:
@@ -235,9 +252,6 @@
cl.sort()
return cl
- def recode(self, text):
- return text.decode(self.encoding, "replace").encode("utf-8")
-
def getcommit(self, rev):
return self.changeset[rev]
--- a/hgext/convert/git.py Sat Jul 21 17:36:45 2007 +0200
+++ b/hgext/convert/git.py Sat Jul 21 17:37:39 2007 +0200
@@ -4,32 +4,29 @@
from common import NoRepo, commit, converter_source
-def recode(s):
- try:
- return s.decode("utf-8").encode("utf-8")
- except:
- try:
- return s.decode("latin-1").encode("utf-8")
- except:
- return s.decode("utf-8", "replace").encode("utf-8")
+class convert_git(converter_source):
+ def gitcmd(self, s):
+ return os.popen('GIT_DIR=%s %s' % (self.path, s))
-class convert_git(converter_source):
- def __init__(self, ui, path):
+ def __init__(self, ui, path, rev=None):
+ super(convert_git, self).__init__(ui, path, rev=rev)
+
if os.path.isdir(path + "/.git"):
path += "/.git"
- self.path = path
- self.ui = ui
if not os.path.exists(path + "/objects"):
raise NoRepo("couldn't open GIT repo %s" % path)
+ self.path = path
def getheads(self):
- fh = os.popen("GIT_DIR=%s git-rev-parse --verify HEAD" % self.path)
- return [fh.read()[:-1]]
+ if not self.rev:
+ return self.gitcmd('git-rev-parse --branches').read().splitlines()
+ else:
+ fh = self.gitcmd("git-rev-parse --verify %s" % self.rev)
+ return [fh.read()[:-1]]
def catfile(self, rev, type):
if rev == "0" * 40: raise IOError()
- fh = os.popen("GIT_DIR=%s git-cat-file %s %s 2>/dev/null"
- % (self.path, type, rev))
+ fh = self.gitcmd("git-cat-file %s %s 2>/dev/null" % (type, rev))
return fh.read()
def getfile(self, name, rev):
@@ -40,8 +37,7 @@
def getchanges(self, version):
self.modecache = {}
- fh = os.popen("GIT_DIR=%s git-diff-tree --root -m -r %s"
- % (self.path, version))
+ fh = self.gitcmd("git-diff-tree --root -m -r %s" % version)
changes = []
for l in fh:
if "\t" not in l: continue
@@ -58,7 +54,7 @@
c = self.catfile(version, "commit") # read the commit hash
end = c.find("\n\n")
message = c[end+2:]
- message = recode(message)
+ message = self.recode(message)
l = c[:end].splitlines()
manifest = l[0].split()[1]
parents = []
@@ -69,13 +65,13 @@
tm, tz = p[-2:]
author = " ".join(p[:-2])
if author[0] == "<": author = author[1:-1]
- author = recode(author)
+ author = self.recode(author)
if n == "committer":
p = v.split()
tm, tz = p[-2:]
committer = " ".join(p[:-2])
if committer[0] == "<": committer = committer[1:-1]
- committer = recode(committer)
+ committer = self.recode(committer)
message += "\ncommitter: %s\n" % committer
if n == "parent": parents.append(v)
@@ -84,12 +80,13 @@
date = tm + " " + str(tz)
author = author or "unknown"
- c = commit(parents=parents, date=date, author=author, desc=message)
+ c = commit(parents=parents, date=date, author=author, desc=message,
+ rev=version)
return c
def gettags(self):
tags = {}
- fh = os.popen('git-ls-remote --tags "%s" 2>/dev/null' % self.path)
+ fh = self.gitcmd('git-ls-remote --tags "%s" 2>/dev/null' % self.path)
prefix = 'refs/tags/'
for line in fh:
line = line.strip()
--- a/hgext/convert/hg.py Sat Jul 21 17:36:45 2007 +0200
+++ b/hgext/convert/hg.py Sat Jul 21 17:37:39 2007 +0200
@@ -29,6 +29,9 @@
if self.repo.dirstate.state(f) == '?':
self.repo.dirstate.update([f], "a")
+ def copyfile(self, source, dest):
+ self.repo.copy(source, dest)
+
def delfile(self, f):
try:
os.unlink(self.repo.wjoin(f))
@@ -51,10 +54,10 @@
text = commit.desc
extra = {}
- try:
- extra["branch"] = commit.branch
- except AttributeError:
- pass
+ if commit.branch:
+ extra['branch'] = commit.branch
+ if commit.rev:
+ extra['convert_revision'] = commit.rev
while parents:
p1 = p2
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/convert/subversion.py Sat Jul 21 17:37:39 2007 +0200
@@ -0,0 +1,667 @@
+# Subversion 1.4/1.5 Python API backend
+#
+# Copyright(C) 2007 Daniel Holth et al
+#
+# Configuration options:
+#
+# convert.svn.trunk
+# Relative path to the trunk (default: "trunk")
+# convert.svn.branches
+# Relative path to tree of branches (default: "branches")
+#
+# Set these in a hgrc, or on the command line as follows:
+#
+# hg convert --config convert.svn.trunk=wackoname [...]
+
+import locale
+import os
+import cPickle as pickle
+from mercurial import util
+
+# Subversion stuff. Works best with very recent Python SVN bindings
+# e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
+# these bindings.
+
+from cStringIO import StringIO
+
+from common import NoRepo, commit, converter_source
+
+try:
+ from svn.core import SubversionException, Pool
+ import svn.core
+ import svn.ra
+ import svn.delta
+ import svn
+ import transport
+except ImportError:
+ pass
+
+class CompatibilityException(Exception): pass
+
+class changedpath(object):
+ def __init__(self, p):
+ self.copyfrom_path = p.copyfrom_path
+ self.copyfrom_rev = p.copyfrom_rev
+ self.action = p.action
+
+# SVN conversion code stolen from bzr-svn and tailor
+class convert_svn(converter_source):
+ def __init__(self, ui, url, rev=None):
+ super(convert_svn, self).__init__(ui, url, rev=rev)
+
+ try:
+ SubversionException
+ except NameError:
+ msg = 'subversion python bindings could not be loaded\n'
+ ui.warn(msg)
+ raise NoRepo(msg)
+
+ self.encoding = locale.getpreferredencoding()
+ self.lastrevs = {}
+
+ latest = None
+ if rev:
+ try:
+ latest = int(rev)
+ except ValueError:
+ raise util.Abort('svn: revision %s is not an integer' % rev)
+ try:
+ # Support file://path@rev syntax. Useful e.g. to convert
+ # deleted branches.
+ at = url.rfind('@')
+ if at >= 0:
+ latest = int(url[at+1:])
+ url = url[:at]
+ except ValueError, e:
+ pass
+ self.url = url
+ self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
+ try:
+ self.transport = transport.SvnRaTransport(url=url)
+ self.ra = self.transport.ra
+ self.ctx = self.transport.client
+ self.base = svn.ra.get_repos_root(self.ra)
+ self.module = self.url[len(self.base):]
+ self.modulemap = {} # revision, module
+ self.commits = {}
+ self.files = {}
+ self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding)
+ except SubversionException, e:
+ raise NoRepo("couldn't open SVN repo %s" % url)
+
+ try:
+ self.get_blacklist()
+ except IOError, e:
+ pass
+
+ self.last_changed = self.latest(self.module, latest)
+
+ self.head = self.revid(self.last_changed)
+
+ def setrevmap(self, revmap):
+ lastrevs = {}
+ for revid in revmap.keys():
+ uuid, module, revnum = self.revsplit(revid)
+ lastrevnum = lastrevs.setdefault(module, revnum)
+ if revnum > lastrevnum:
+ lastrevs[module] = revnum
+ self.lastrevs = lastrevs
+
+ def exists(self, path, optrev):
+ try:
+ return svn.client.ls(self.url.rstrip('/') + '/' + path,
+ optrev, False, self.ctx)
+ except SubversionException, err:
+ return []
+
+ def getheads(self):
+ # detect standard /branches, /tags, /trunk layout
+ optrev = svn.core.svn_opt_revision_t()
+ optrev.kind = svn.core.svn_opt_revision_number
+ optrev.value.number = self.last_changed
+ rpath = self.url.strip('/')
+ cfgtrunk = self.ui.config('convert', 'svn.trunk')
+ cfgbranches = self.ui.config('convert', 'svn.branches')
+ trunk = (cfgtrunk or 'trunk').strip('/')
+ branches = (cfgbranches or 'branches').strip('/')
+ if self.exists(trunk, optrev) and self.exists(branches, optrev):
+ self.ui.note('found trunk at %r and branches at %r\n' %
+ (trunk, branches))
+ oldmodule = self.module
+ self.module += '/' + trunk
+ lt = self.latest(self.module, self.last_changed)
+ self.head = self.revid(lt)
+ self.heads = [self.head]
+ branchnames = svn.client.ls(rpath + '/' + branches, optrev, False,
+ self.ctx)
+ for branch in branchnames.keys():
+ if oldmodule:
+ module = '/' + oldmodule + '/' + branches + '/' + branch
+ else:
+ module = '/' + branches + '/' + branch
+ brevnum = self.latest(module, self.last_changed)
+ brev = self.revid(brevnum, module)
+ self.ui.note('found branch %s at %d\n' % (branch, brevnum))
+ self.heads.append(brev)
+ elif cfgtrunk or cfgbranches:
+ raise util.Abort(_('trunk/branch layout expected, '
+ 'but not found'))
+ else:
+ self.ui.note('working with one branch\n')
+ self.heads = [self.head]
+ return self.heads
+
+ def getfile(self, file, rev):
+ data, mode = self._getfile(file, rev)
+ self.modecache[(file, rev)] = mode
+ return data
+
+ def getmode(self, file, rev):
+ return self.modecache[(file, rev)]
+
+ def getchanges(self, rev):
+ self.modecache = {}
+ files = self.files[rev]
+ cl = files
+ cl.sort()
+ # caller caches the result, so free it here to release memory
+ del self.files[rev]
+ return cl
+
+ def getcommit(self, rev):
+ if rev not in self.commits:
+ uuid, module, revnum = self.revsplit(rev)
+ self.module = module
+ self.reparent(module)
+ stop = self.lastrevs.get(module, 0)
+ self._fetch_revisions(from_revnum=revnum, to_revnum=stop)
+ commit = self.commits[rev]
+ # caller caches the result, so free it here to release memory
+ del self.commits[rev]
+ return commit
+
+ def get_log(self, paths, start, end, limit=0, discover_changed_paths=True,
+ strict_node_history=False):
+ '''wrapper for svn.ra.get_log.
+ on a large repository, svn.ra.get_log pins huge amounts of
+ memory that cannot be recovered. work around it by forking
+ and writing results over a pipe.'''
+
+ def child(fp):
+ protocol = -1
+ def receiver(orig_paths, revnum, author, date, message, pool):
+ if orig_paths is not None:
+ for k, v in orig_paths.iteritems():
+ orig_paths[k] = changedpath(v)
+ pickle.dump((orig_paths, revnum, author, date, message),
+ fp, protocol)
+
+ try:
+ # Use an ra of our own so that our parent can consume
+ # our results without confusing the server.
+ t = transport.SvnRaTransport(url=self.url)
+ svn.ra.get_log(t.ra, paths, start, end, limit,
+ discover_changed_paths,
+ strict_node_history,
+ receiver)
+ except SubversionException, (_, num):
+ self.ui.print_exc()
+ pickle.dump(num, fp, protocol)
+ else:
+ pickle.dump(None, fp, protocol)
+ fp.close()
+
+ def parent(fp):
+ while True:
+ entry = pickle.load(fp)
+ try:
+ orig_paths, revnum, author, date, message = entry
+ except:
+ if entry is None:
+ break
+ raise SubversionException("child raised exception", entry)
+ yield entry
+
+ rfd, wfd = os.pipe()
+ pid = os.fork()
+ if pid:
+ os.close(wfd)
+ for p in parent(os.fdopen(rfd, 'rb')):
+ yield p
+ ret = os.waitpid(pid, 0)[1]
+ if ret:
+ raise util.Abort(_('get_log %s') % util.explain_exit(ret))
+ else:
+ os.close(rfd)
+ child(os.fdopen(wfd, 'wb'))
+ os._exit(0)
+
+ def gettags(self):
+ tags = {}
+ start = self.revnum(self.head)
+ try:
+ for entry in self.get_log(['/tags'], 0, start):
+ orig_paths, revnum, author, date, message = entry
+ for path in orig_paths:
+ if not path.startswith('/tags/'):
+ continue
+ ent = orig_paths[path]
+ source = ent.copyfrom_path
+ rev = ent.copyfrom_rev
+ tag = path.split('/', 2)[2]
+ tags[tag] = self.revid(rev, module=source)
+ except SubversionException, (_, num):
+ self.ui.note('no tags found at revision %d\n' % start)
+ return tags
+
+ # -- helper functions --
+
+ def revid(self, revnum, module=None):
+ if not module:
+ module = self.module
+ return (u"svn:%s%s@%s" % (self.uuid, module, revnum)).decode(self.encoding)
+
+ def revnum(self, rev):
+ return int(rev.split('@')[-1])
+
+ def revsplit(self, rev):
+ url, revnum = rev.encode(self.encoding).split('@', 1)
+ revnum = int(revnum)
+ parts = url.split('/', 1)
+ uuid = parts.pop(0)[4:]
+ mod = ''
+ if parts:
+ mod = '/' + parts[0]
+ return uuid, mod, revnum
+
+ def latest(self, path, stop=0):
+ 'find the latest revision affecting path, up to stop'
+ if not stop:
+ stop = svn.ra.get_latest_revnum(self.ra)
+ try:
+ self.reparent('')
+ dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
+ self.reparent(self.module)
+ except SubversionException:
+ dirent = None
+ if not dirent:
+ print self.base, path
+ raise util.Abort('%s not found up to revision %d' % (path, stop))
+
+ return dirent.created_rev
+
+ def get_blacklist(self):
+ """Avoid certain revision numbers.
+ It is not uncommon for two nearby revisions to cancel each other
+ out, e.g. 'I copied trunk into a subdirectory of itself instead
+ of making a branch'. The converted repository is significantly
+ smaller if we ignore such revisions."""
+ self.blacklist = set()
+ blacklist = self.blacklist
+ for line in file("blacklist.txt", "r"):
+ if not line.startswith("#"):
+ try:
+ svn_rev = int(line.strip())
+ blacklist.add(svn_rev)
+ except ValueError, e:
+ pass # not an integer or a comment
+
+ def is_blacklisted(self, svn_rev):
+ return svn_rev in self.blacklist
+
+ def reparent(self, module):
+ svn_url = self.base + module
+ self.ui.debug("reparent to %s\n" % svn_url.encode(self.encoding))
+ svn.ra.reparent(self.ra, svn_url.encode(self.encoding))
+
+ def _fetch_revisions(self, from_revnum = 0, to_revnum = 347):
+ def get_entry_from_path(path, module=self.module):
+ # Given the repository url of this wc, say
+ # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
+ # extract the "entry" portion (a relative path) from what
+ # svn log --xml says, ie
+ # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
+ # that is to say "tests/PloneTestCase.py"
+
+ if path.startswith(module):
+ relative = path[len(module):]
+ if relative.startswith('/'):
+ return relative[1:]
+ else:
+ return relative
+
+ # The path is outside our tracked tree...
+ self.ui.debug('Ignoring %r since it is not under %r\n' % (path, module))
+ return None
+
+ self.child_cset = None
+ def parselogentry(orig_paths, revnum, author, date, message):
+ self.ui.debug("parsing revision %d (%d changes)\n" %
+ (revnum, len(orig_paths)))
+
+ if revnum in self.modulemap:
+ new_module = self.modulemap[revnum]
+ if new_module != self.module:
+ self.module = new_module
+ self.reparent(self.module)
+
+ copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions.
+ copies = {}
+ entries = []
+ rev = self.revid(revnum)
+ parents = []
+
+ # branch log might return entries for a parent we already have
+ if (rev in self.commits or
+ (revnum < self.lastrevs.get(self.module, 0))):
+ return
+
+ try:
+ branch = self.module.split("/")[-1]
+ if branch == 'trunk':
+ branch = ''
+ except IndexError:
+ branch = None
+
+ orig_paths = orig_paths.items()
+ orig_paths.sort()
+ for path, ent in orig_paths:
+ # self.ui.write("path %s\n" % path)
+ if path == self.module: # Follow branching back in history
+ if ent:
+ if ent.copyfrom_path:
+ # ent.copyfrom_rev may not be the actual last revision
+ prev = self.latest(ent.copyfrom_path, ent.copyfrom_rev)
+ self.modulemap[prev] = ent.copyfrom_path
+ parents = [self.revid(prev, ent.copyfrom_path)]
+ self.ui.note('found parent of branch %s at %d: %s\n' % \
+ (self.module, prev, ent.copyfrom_path))
+ else:
+ self.ui.debug("No copyfrom path, don't know what to do.\n")
+ # Maybe it was added and there is no more history.
+ entrypath = get_entry_from_path(path, module=self.module)
+ # self.ui.write("entrypath %s\n" % entrypath)
+ if entrypath is None:
+ # Outside our area of interest
+ self.ui.debug("boring@%s: %s\n" % (revnum, path))
+ continue
+ entry = entrypath.decode(self.encoding)
+
+ kind = svn.ra.check_path(self.ra, entrypath, revnum)
+ if kind == svn.core.svn_node_file:
+ if ent.copyfrom_path:
+ copyfrom_path = get_entry_from_path(ent.copyfrom_path)
+ if copyfrom_path:
+ self.ui.debug("Copied to %s from %s@%s\n" % (entry, copyfrom_path, ent.copyfrom_rev))
+ # It's probably important for hg that the source
+ # exists in the revision's parent, not just the
+ # ent.copyfrom_rev
+ fromkind = svn.ra.check_path(self.ra, copyfrom_path, ent.copyfrom_rev)
+ if fromkind != 0:
+ copies[self.recode(entry)] = self.recode(copyfrom_path)
+ entries.append(self.recode(entry))
+ elif kind == 0: # gone, but had better be a deleted *file*
+ self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
+
+ # if a branch is created but entries are removed in the same
+ # changeset, get the right fromrev
+ if parents:
+ uuid, old_module, fromrev = self.revsplit(parents[0])
+ else:
+ fromrev = revnum - 1
+ # might always need to be revnum - 1 in these 3 lines?
+ old_module = self.modulemap.get(fromrev, self.module)
+
+ basepath = old_module + "/" + get_entry_from_path(path, module=self.module)
+ entrypath = old_module + "/" + get_entry_from_path(path, module=self.module)
+
+ def lookup_parts(p):
+ rc = None
+ parts = p.split("/")
+ for i in range(len(parts)):
+ part = "/".join(parts[:i])
+ info = part, copyfrom.get(part, None)
+ if info[1] is not None:
+ self.ui.debug("Found parent directory %s\n" % info[1])
+ rc = info
+ return rc
+
+ self.ui.debug("base, entry %s %s\n" % (basepath, entrypath))
+
+ frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
+
+ # need to remove fragment from lookup_parts and replace with copyfrom_path
+ if frompath is not None:
+ self.ui.debug("munge-o-matic\n")
+ self.ui.debug(entrypath + '\n')
+ self.ui.debug(entrypath[len(frompath):] + '\n')
+ entrypath = froment.copyfrom_path + entrypath[len(frompath):]
+ fromrev = froment.copyfrom_rev
+ self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath))
+
+ fromkind = svn.ra.check_path(self.ra, entrypath, fromrev)
+ if fromkind == svn.core.svn_node_file: # a deleted file
+ entries.append(self.recode(entry))
+ elif fromkind == svn.core.svn_node_dir:
+ # print "Deleted/moved non-file:", revnum, path, ent
+ # children = self._find_children(path, revnum - 1)
+ # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action)
+ # Sometimes this is tricky. For example: in
+ # The Subversion Repository revision 6940 a dir
+ # was copied and one of its files was deleted
+ # from the new location in the same commit. This
+ # code can't deal with that yet.
+ if ent.action == 'C':
+ children = self._find_children(path, fromrev)
+ else:
+ oroot = entrypath.strip('/')
+ nroot = path.strip('/')
+ children = self._find_children(oroot, fromrev)
+ children = [s.replace(oroot,nroot) for s in children]
+ # Mark all [files, not directories] as deleted.
+ for child in children:
+ # Can we move a child directory and its
+ # parent in the same commit? (probably can). Could
+ # cause problems if instead of revnum -1,
+ # we have to look in (copyfrom_path, revnum - 1)
+ entrypath = get_entry_from_path("/" + child, module=old_module)
+ if entrypath:
+ entry = self.recode(entrypath.decode(self.encoding))
+ if entry in copies:
+ # deleted file within a copy
+ del copies[entry]
+ else:
+ entries.append(entry)
+ else:
+ self.ui.debug('unknown path in revision %d: %s\n' % \
+ (revnum, path))
+ elif kind == svn.core.svn_node_dir:
+ # Should probably synthesize normal file entries
+ # and handle as above to clean up copy/rename handling.
+
+ # If the directory just had a prop change,
+ # then we shouldn't need to look for its children.
+ # Also this could create duplicate entries. Not sure
+ # whether this will matter. Maybe should make entries a set.
+ # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev
+ # This will fail if a directory was copied
+ # from another branch and then some of its files
+ # were deleted in the same transaction.
+ children = self._find_children(path, revnum)
+ children.sort()
+ for child in children:
+ # Can we move a child directory and its
+ # parent in the same commit? (probably can). Could
+ # cause problems if instead of revnum -1,
+ # we have to look in (copyfrom_path, revnum - 1)
+ entrypath = get_entry_from_path("/" + child, module=self.module)
+ # print child, self.module, entrypath
+ if entrypath:
+ # Need to filter out directories here...
+ kind = svn.ra.check_path(self.ra, entrypath, revnum)
+ if kind != svn.core.svn_node_dir:
+ entries.append(self.recode(entrypath))
+
+ # Copies here (must copy all from source)
+ # Probably not a real problem for us if
+ # source does not exist
+
+ # Can do this with the copy command "hg copy"
+ # if ent.copyfrom_path:
+ # copyfrom_entry = get_entry_from_path(ent.copyfrom_path.decode(self.encoding),
+ # module=self.module)
+ # copyto_entry = entrypath
+ #
+ # print "copy directory", copyfrom_entry, 'to', copyto_entry
+ #
+ # copies.append((copyfrom_entry, copyto_entry))
+
+ if ent.copyfrom_path:
+ copyfrom_path = ent.copyfrom_path.decode(self.encoding)
+ copyfrom_entry = get_entry_from_path(copyfrom_path, module=self.module)
+ if copyfrom_entry:
+ copyfrom[path] = ent
+ self.ui.debug("mark %s came from %s\n" % (path, copyfrom[path]))
+
+ # Good, /probably/ a regular copy. Really should check
+ # to see whether the parent revision actually contains
+ # the directory in question.
+ children = self._find_children(self.recode(copyfrom_path), ent.copyfrom_rev)
+ children.sort()
+ for child in children:
+ entrypath = get_entry_from_path("/" + child, module=self.module)
+ if entrypath:
+ entry = entrypath.decode(self.encoding)
+ # print "COPY COPY From", copyfrom_entry, entry
+ copyto_path = path + entry[len(copyfrom_entry):]
+ copyto_entry = get_entry_from_path(copyto_path, module=self.module)
+ # print "COPY", entry, "COPY To", copyto_entry
+ copies[self.recode(copyto_entry)] = self.recode(entry)
+ # copy from quux splort/quuxfile
+
+ self.modulemap[revnum] = self.module # track backwards in time
+ # a list of (filename, id) where id lets us retrieve the file.
+            #   eg in git, id is the object hash. for svn it is the revision id.
+ self.files[rev] = zip(entries, [rev] * len(entries))
+ if not entries:
+ return
+
+ # Example SVN datetime. Includes microseconds.
+ # ISO-8601 conformant
+ # '2007-01-04T17:35:00.902377Z'
+ date = util.parsedate(date[:18] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
+
+ log = message and self.recode(message)
+ author = author and self.recode(author) or ''
+
+ cset = commit(author=author,
+ date=util.datestr(date),
+ desc=log,
+ parents=parents,
+ copies=copies,
+ branch=branch,
+ rev=rev.encode('utf-8'))
+
+ self.commits[rev] = cset
+ if self.child_cset and not self.child_cset.parents:
+ self.child_cset.parents = [rev]
+ self.child_cset = cset
+
+ self.ui.note('fetching revision log for "%s" from %d to %d\n' %
+ (self.module, from_revnum, to_revnum))
+
+ try:
+ discover_changed_paths = True
+ strict_node_history = False
+ for entry in self.get_log([self.module], from_revnum, to_revnum):
+ orig_paths, revnum, author, date, message = entry
+ if self.is_blacklisted(revnum):
+ self.ui.note('skipping blacklisted revision %d\n' % revnum)
+ continue
+ if orig_paths is None:
+ self.ui.debug('revision %d has no entries\n' % revnum)
+ continue
+ parselogentry(orig_paths, revnum, author, date, message)
+ except SubversionException, (_, num):
+ if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
+ raise NoSuchRevision(branch=self,
+ revision="Revision number %d" % to_revnum)
+ raise
+
+ def _getfile(self, file, rev):
+ io = StringIO()
+ # TODO: ra.get_file transmits the whole file instead of diffs.
+ mode = ''
+ try:
+ revnum = self.revnum(rev)
+ if self.module != self.modulemap[revnum]:
+ self.module = self.modulemap[revnum]
+ self.reparent(self.module)
+ info = svn.ra.get_file(self.ra, file, revnum, io)
+ if isinstance(info, list):
+ info = info[-1]
+ mode = ("svn:executable" in info) and 'x' or ''
+ mode = ("svn:special" in info) and 'l' or mode
+ except SubversionException, e:
+ notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
+ svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
+ if e.apr_err in notfound: # File not found
+ raise IOError()
+ raise
+ data = io.getvalue()
+ if mode == 'l':
+ link_prefix = "link "
+ if data.startswith(link_prefix):
+ data = data[len(link_prefix):]
+ return data, mode
+
+ def _find_children(self, path, revnum):
+ path = path.strip("/")
+
+ def _find_children_fallback(path, revnum):
+ # SWIG python bindings for getdir are broken up to at least 1.4.3
+ pool = Pool()
+ optrev = svn.core.svn_opt_revision_t()
+ optrev.kind = svn.core.svn_opt_revision_number
+ optrev.value.number = revnum
+ rpath = '/'.join([self.base, path]).strip('/')
+ return ['%s/%s' % (path, x) for x in svn.client.ls(rpath, optrev, True, self.ctx, pool).keys()]
+
+ if hasattr(self, '_find_children_fallback'):
+ return _find_children_fallback(path, revnum)
+
+ self.reparent("/" + path)
+ pool = Pool()
+
+ children = []
+ def find_children_inner(children, path, revnum = revnum):
+ if hasattr(svn.ra, 'get_dir2'): # Since SVN 1.4
+ fields = 0xffffffff # Binding does not provide SVN_DIRENT_ALL
+ getdir = svn.ra.get_dir2(self.ra, path, revnum, fields, pool)
+ else:
+ getdir = svn.ra.get_dir(self.ra, path, revnum, pool)
+ if type(getdir) == dict:
+ # python binding for getdir is broken up to at least 1.4.3
+ raise CompatibilityException()
+ dirents = getdir[0]
+ if type(dirents) == int:
+ # got here once due to infinite recursion bug
+ return
+ c = dirents.keys()
+ c.sort()
+ for child in c:
+ dirent = dirents[child]
+ if dirent.kind == svn.core.svn_node_dir:
+ find_children_inner(children, (path + "/" + child).strip("/"))
+ else:
+ children.append((path + "/" + child).strip("/"))
+
+ try:
+ find_children_inner(children, "")
+ except CompatibilityException:
+ self._find_children_fallback = True
+ self.reparent(self.module)
+ return _find_children_fallback(path, revnum)
+
+ self.reparent(self.module)
+ return [path + "/" + c for c in children]
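
Revisions from Subversion are tracked as strings of the form svn:UUID/module@revnum, produced by revid() and taken apart again by revsplit(). A stand-alone round trip of that format (the uuid below is made up):

    def revid(uuid, module, revnum):
        return "svn:%s%s@%s" % (uuid, module, revnum)

    def revsplit(rev):
        url, revnum = rev.split('@', 1)
        parts = url.split('/', 1)
        uuid = parts.pop(0)[4:]          # drop the 'svn:' prefix
        mod = ''
        if parts:
            mod = '/' + parts[0]
        return uuid, mod, int(revnum)

    rid = revid('9c1a7e5b-0000-0000-0000-000000000000', '/trunk', 42)
    assert revsplit(rid) == ('9c1a7e5b-0000-0000-0000-000000000000', '/trunk', 42)
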
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/convert/transport.py Sat Jul 21 17:37:39 2007 +0200
@@ -0,0 +1,125 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2007 Daniel Holth <dholth@fastmail.fm>
+# This is a stripped-down version of the original bzr-svn transport.py,
+# Copyright (C) 2006 Jelmer Vernooij <jelmer@samba.org>
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+from cStringIO import StringIO
+import os
+from tempfile import mktemp
+
+from svn.core import SubversionException, Pool
+import svn.ra
+import svn.client
+import svn.core
+
+# Some older versions of the Python bindings need to be
+# explicitly initialized. But what we want to do probably
+# won't work worth a darn against those libraries anyway!
+svn.ra.initialize()
+
+svn_config = svn.core.svn_config_get_config(None)
+
+
+def _create_auth_baton(pool):
+ """Create a Subversion authentication baton. """
+ import svn.client
+ # Give the client context baton a suite of authentication
+    # providers.
+ providers = [
+ svn.client.get_simple_provider(pool),
+ svn.client.get_username_provider(pool),
+ svn.client.get_ssl_client_cert_file_provider(pool),
+ svn.client.get_ssl_client_cert_pw_file_provider(pool),
+ svn.client.get_ssl_server_trust_file_provider(pool),
+ ]
+ return svn.core.svn_auth_open(providers, pool)
+
+class NotBranchError(SubversionException):
+ pass
+
+class SvnRaTransport(object):
+ """
+ Open an ra connection to a Subversion repository.
+ """
+ def __init__(self, url="", ra=None):
+ self.pool = Pool()
+ self.svn_url = url
+ self.username = ''
+ self.password = ''
+
+ # Only Subversion 1.4 has reparent()
+ if ra is None or not hasattr(svn.ra, 'reparent'):
+ self.client = svn.client.create_context(self.pool)
+ ab = _create_auth_baton(self.pool)
+ if False:
+ svn.core.svn_auth_set_parameter(
+ ab, svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME, self.username)
+ svn.core.svn_auth_set_parameter(
+ ab, svn.core.SVN_AUTH_PARAM_DEFAULT_PASSWORD, self.password)
+ self.client.auth_baton = ab
+ self.client.config = svn_config
+ try:
+ self.ra = svn.client.open_ra_session(
+ self.svn_url.encode('utf8'),
+ self.client, self.pool)
+ except SubversionException, (_, num):
+ if num in (svn.core.SVN_ERR_RA_ILLEGAL_URL,
+ svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED,
+ svn.core.SVN_ERR_BAD_URL):
+ raise NotBranchError(url)
+ raise
+ else:
+ self.ra = ra
+ svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))
+
+ class Reporter:
+ def __init__(self, (reporter, report_baton)):
+ self._reporter = reporter
+ self._baton = report_baton
+
+ def set_path(self, path, revnum, start_empty, lock_token, pool=None):
+ svn.ra.reporter2_invoke_set_path(self._reporter, self._baton,
+ path, revnum, start_empty, lock_token, pool)
+
+ def delete_path(self, path, pool=None):
+ svn.ra.reporter2_invoke_delete_path(self._reporter, self._baton,
+ path, pool)
+
+ def link_path(self, path, url, revision, start_empty, lock_token,
+ pool=None):
+ svn.ra.reporter2_invoke_link_path(self._reporter, self._baton,
+ path, url, revision, start_empty, lock_token,
+ pool)
+
+ def finish_report(self, pool=None):
+ svn.ra.reporter2_invoke_finish_report(self._reporter,
+ self._baton, pool)
+
+ def abort_report(self, pool=None):
+ svn.ra.reporter2_invoke_abort_report(self._reporter,
+ self._baton, pool)
+
+ def do_update(self, revnum, path, *args, **kwargs):
+ return self.Reporter(svn.ra.do_update(self.ra, revnum, path, *args, **kwargs))
+
+ def clone(self, offset=None):
+ """See Transport.clone()."""
+ if offset is None:
+ return self.__class__(self.base)
+
+ return SvnRaTransport(urlutils.join(self.base, offset), ra=self.ra)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/interhg.py Sat Jul 21 17:37:39 2007 +0200
@@ -0,0 +1,64 @@
+# interhg.py - interhg
+#
+# Copyright 2007 OHASHI Hideya <ohachige@gmail.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+#
+# The `interhg' Mercurial extension allows you to transform changelog and
+# summary text, much like InterWiki links do.
+#
+# To enable this extension:
+#
+# [extensions]
+# interhg =
+#
+# Here is an example that links to a bug tracking system.
+#
+# [interhg]
+# pat1 = s/issue(\d+)/ <a href="http:\/\/bts\/issue\1">issue\1<\/a> /
+#
+# You can add further patterns as pat2, pat3, ...
+# For example:
+#
+# pat2 = s/(^|\s)#(\d+)\b/ <b>#\2<\/b> /
+
+import re
+from mercurial.hgweb import hgweb_mod
+from mercurial import templater
+
+orig_escape = templater.common_filters["escape"]
+
+interhg_table = []
+
+def interhg_escape(x):
+ escstr = orig_escape(x)
+ for pat in interhg_table:
+ regexp = pat[0]
+ format = pat[1]
+ escstr = regexp.sub(format, escstr)
+ return escstr
+
+templater.common_filters["escape"] = interhg_escape
+
+orig_refresh = hgweb_mod.hgweb.refresh
+
+def interhg_refresh(self):
+ interhg_table[:] = []
+ num = 1
+ while True:
+ key = 'pat%d' % num
+ pat = self.config('interhg', key)
+        if pat is None:
+ break
+ pat = pat[2:-1]
+ span = re.search(r'[^\\]/', pat).span()
+ regexp = pat[:span[0] + 1]
+ format = pat[span[1]:]
+ format = re.sub(r'\\/', '/', format)
+ regexp = re.compile(regexp)
+ interhg_table.append((regexp, format))
+ num += 1
+ return orig_refresh(self)
+
+hgweb_mod.hgweb.refresh = interhg_refresh
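
Each patN value is an s/REGEXP/FORMAT/ expression: refresh() strips the s/ ... / wrapper, splits at the first unescaped slash, and un-escapes \/ in the replacement before compiling. The same steps applied to the pat1 example from the header comment, outside hgweb:

    import re

    pat = r's/issue(\d+)/ <a href="http:\/\/bts\/issue\1">issue\1<\/a> /'
    body = pat[2:-1]                           # drop the 's/' prefix and trailing '/'
    span = re.search(r'[^\\]/', body).span()   # locate the first unescaped '/'
    regexp = re.compile(body[:span[0] + 1])
    format = re.sub(r'\\/', '/', body[span[1]:])

    print(regexp.sub(format, 'fixes issue42'))
    # fixes  <a href="http://bts/issue42">issue42</a>
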
--- a/hgext/mq.py Sat Jul 21 17:36:45 2007 +0200
+++ b/hgext/mq.py Sat Jul 21 17:37:39 2007 +0200
@@ -1487,11 +1487,20 @@
Source patch repository is looked for in <src>/.hg/patches by
default. Use -p <url> to change.
+
+ The patch directory must be a nested mercurial repository, as
+ would be created by qinit -c.
'''
cmdutil.setremoteconfig(ui, opts)
if dest is None:
dest = hg.defaultdest(source)
sr = hg.repository(ui, ui.expandpath(source))
+ patchdir = opts['patches'] or (sr.url() + '/.hg/patches')
+ try:
+ pr = hg.repository(ui, patchdir)
+ except hg.RepoError:
+ raise util.Abort(_('versioned patch repository not found'
+ ' (see qinit -c)'))
qbase, destrev = None, None
if sr.local():
if sr.mq.applied:
--- a/hgext/patchbomb.py Sat Jul 21 17:36:45 2007 +0200
+++ b/hgext/patchbomb.py Sat Jul 21 17:37:39 2007 +0200
@@ -306,8 +306,12 @@
d = cdiffstat(_('Final summary:\n'), jumbo)
if d: body = '\n' + d
- ui.write(_('\nWrite the introductory message for the patch series.\n\n'))
- body = ui.edit(body, sender)
+ if opts['desc']:
+ body = open(opts['desc']).read()
+ else:
+ ui.write(_('\nWrite the introductory message for the '
+ 'patch series.\n\n'))
+ body = ui.edit(body, sender)
msg = email.MIMEText.MIMEText(body)
msg['Subject'] = subj
@@ -417,6 +421,7 @@
('c', 'cc', [], _('email addresses of copy recipients')),
('d', 'diffstat', None, _('add diffstat output to messages')),
('', 'date', '', _('use the given date as the sending date')),
+ ('', 'desc', '', _('use the given file as the series description')),
('g', 'git', None, _('use git extended diff format')),
('f', 'from', '', _('email address of sender')),
('', 'plain', None, _('omit hg patch header')),
--- a/hgext/purge.py Sat Jul 21 17:36:45 2007 +0200
+++ b/hgext/purge.py Sat Jul 21 17:37:39 2007 +0200
@@ -31,7 +31,7 @@
from mercurial.i18n import _
import os
-def dopurge(ui, repo, dirs=None, act=True, ignored=False,
+def dopurge(ui, repo, dirs=None, act=True, ignored=False,
abort_on_err=False, eol='\n',
force=False, include=None, exclude=None):
def error(msg):
--- a/hgext/win32text.py Sat Jul 21 17:36:45 2007 +0200
+++ b/hgext/win32text.py Sat Jul 21 17:37:39 2007 +0200
@@ -1,7 +1,24 @@
-import mercurial.util
+from mercurial import util, ui
+from mercurial.i18n import gettext as _
+import re
+
+# regexp for single LF without CR preceding.
+re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
def dumbdecode(s, cmd):
- return s.replace('\n', '\r\n')
+    # warn if the file already has CRLF in the repository,
+    # as it might cause unexpected eol conversion.
+ # see issue 302:
+ # http://www.selenic.com/mercurial/bts/issue302
+ if '\r\n' in s:
+ u = ui.ui()
+        u.warn(_('WARNING: file in repository already has CRLF line endings,\n'
+                 ' which do not need eol conversion by the win32text plugin.\n'
+                 ' Please reconsider the encode/decode settings in'
+                 ' mercurial.ini or .hg/hgrc\n'
+                 ' before the next commit.\n'))
+ # replace single LF to CRLF
+ return re_single_lf.sub('\\1\r\n', s)
def dumbencode(s, cmd):
return s.replace('\r\n', '\n')
@@ -20,7 +37,7 @@
return dumbencode(s, cmd)
return s
-mercurial.util.filtertable.update({
+util.filtertable.update({
'dumbdecode:': dumbdecode,
'dumbencode:': dumbencode,
'cleverdecode:': cleverdecode,
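
The new regexp only matches LF that is not already preceded by CR, so files that somehow contain CRLF in the repository are passed through (after the warning) instead of turning into CR CR LF. Checking the substitution on its own:

    import re

    re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)

    def to_crlf(s):
        return re_single_lf.sub('\\1\r\n', s)

    assert to_crlf('a\nb\n') == 'a\r\nb\r\n'     # bare LF gains a CR
    assert to_crlf('a\r\nb\n') == 'a\r\nb\r\n'   # existing CRLF left untouched
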
--- a/hgmerge Sat Jul 21 17:36:45 2007 +0200
+++ b/hgmerge Sat Jul 21 17:37:39 2007 +0200
@@ -96,6 +96,20 @@
done
}
+# Check if conflict markers are present and ask if the merge was successful
+conflicts_or_success() {
+ while egrep '^(<<<<<<< .*|=======|>>>>>>> .*)$' "$LOCAL" >/dev/null; do
+ echo "$LOCAL contains conflict markers."
+ echo "Keep this version? [y/n]"
+ read answer
+ case "$answer" in
+ y*|Y*) success;;
+ n*|N*) failure;;
+ esac
+ done
+ success
+}
+
# Clean up when interrupted
trap "failure" 1 2 3 6 15 # HUP INT QUIT ABRT TERM
@@ -123,20 +137,20 @@
# filemerge prefers the right by default
$FILEMERGE -left "$OTHER" -right "$LOCAL" -ancestor "$BASE" -merge "$LOCAL"
[ $? -ne 0 ] && echo "FileMerge failed to launch" && failure
- $TEST "$LOCAL" -nt "$CHGTEST" && success || ask_if_merged
+ $TEST "$LOCAL" -nt "$CHGTEST" && conflicts_or_success || ask_if_merged
fi
if [ -n "$DISPLAY" ]; then
# try using kdiff3, which is fairly nice
if [ -n "$KDIFF3" ]; then
$KDIFF3 --auto "$BASE" "$BACKUP" "$OTHER" -o "$LOCAL" || failure
- success
+ conflicts_or_success
fi
# try using tkdiff, which is a bit less sophisticated
if [ -n "$TKDIFF" ]; then
$TKDIFF "$BACKUP" "$OTHER" -a "$BASE" -o "$LOCAL" || failure
- success
+ conflicts_or_success
fi
if [ -n "$MELD" ]; then
@@ -147,7 +161,7 @@
# use the file with conflicts
$MELD "$LOCAL.tmp.$RAND" "$LOCAL" "$OTHER" || failure
# Also it doesn't return good error code
- $TEST "$LOCAL" -nt "$CHGTEST" && success || ask_if_merged
+ $TEST "$LOCAL" -nt "$CHGTEST" && conflicts_or_success || ask_if_merged
fi
fi
@@ -158,7 +172,7 @@
$EDITOR "$LOCAL" || failure
# Some editors do not return meaningful error codes
# Do not take any chances
- $TEST "$LOCAL" -nt "$CHGTEST" && success || ask_if_merged
+ $TEST "$LOCAL" -nt "$CHGTEST" && conflicts_or_success || ask_if_merged
fi
# attempt to manually merge with diff and patch
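
The conflicts_or_success helper above greps the merged file for leftover conflict markers before declaring victory. A hedged Python equivalent of that check, with the marker pattern copied from the shell function:

    import re

    # same markers the shell helper looks for
    MARKER = re.compile(r'^(<<<<<<< .*|=======|>>>>>>> .*)$', re.MULTILINE)

    def has_conflict_markers(path):
        # True if the merge result still contains <<<<<<</=======/>>>>>>> lines
        with open(path, encoding='utf-8', errors='replace') as fp:
            return bool(MARKER.search(fp.read()))
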
--- a/mercurial/changelog.py Sat Jul 21 17:36:45 2007 +0200
+++ b/mercurial/changelog.py Sat Jul 21 17:37:39 2007 +0200
@@ -58,7 +58,6 @@
def read(self, count=-1):
'''only trick here is reads that span real file and data'''
ret = ""
- old_offset = self.offset
if self.offset < self.size:
s = self.fp.read(count)
ret = s
@@ -131,7 +130,10 @@
return extra
def encode_extra(self, d):
- items = [_string_escape(":".join(t)) for t in d.iteritems()]
+ # keys must be sorted to produce a deterministic changelog entry
+ keys = d.keys()
+ keys.sort()
+ items = [_string_escape('%s:%s' % (k, d[k])) for k in keys]
return "\0".join(items)
def extract(self, text):
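
The point of sorting the keys above is that the encoded "extra" block becomes deterministic, so identical metadata always serializes to the same changelog entry. A rough stand-alone illustration (the real encode_extra also escapes each item with the private _string_escape helper, which is omitted here):

    def encode_extra(d):
        # sort keys so the same dict always serializes to the same string
        items = ['%s:%s' % (k, d[k]) for k in sorted(d)]
        return '\0'.join(items)      # NUL-separated, escaping omitted

    assert encode_extra({'branch': 'default', 'close': '1'}) == \
           encode_extra({'close': '1', 'branch': 'default'})
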
--- a/mercurial/commands.py Sat Jul 21 17:36:45 2007 +0200
+++ b/mercurial/commands.py Sat Jul 21 17:37:39 2007 +0200
@@ -8,7 +8,7 @@
import demandimport; demandimport.enable()
from node import *
from i18n import _
-import bisect, os, re, sys, urllib, shlex, stat
+import bisect, os, re, sys, urllib, stat
import ui, hg, util, revlog, bundlerepo, extensions
import difflib, patch, time, help, mdiff, tempfile
import errno, version, socket
@@ -70,19 +70,31 @@
detects as binary. With -a, annotate will generate an annotation
anyway, probably with undesirable results.
"""
- getdate = util.cachefunc(lambda x: util.datestr(x.date()))
+ getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
if not pats:
raise util.Abort(_('at least one file name or pattern required'))
- opmap = [['user', lambda x: ui.shortuser(x.user())],
- ['number', lambda x: str(x.rev())],
- ['changeset', lambda x: short(x.node())],
- ['date', getdate], ['follow', lambda x: x.path()]]
+ opmap = [('user', lambda x: ui.shortuser(x[0].user())),
+ ('number', lambda x: str(x[0].rev())),
+ ('changeset', lambda x: short(x[0].node())),
+ ('date', getdate),
+ ('follow', lambda x: x[0].path()),
+ ]
+
if (not opts['user'] and not opts['changeset'] and not opts['date']
and not opts['follow']):
opts['number'] = 1
+ linenumber = opts.get('line_number') is not None
+ if (linenumber and (not opts['changeset']) and (not opts['number'])):
+ raise util.Abort(_('at least one of -n/-c is required for -l'))
+
+ funcmap = [func for op, func in opmap if opts.get(op)]
+ if linenumber:
+ lastfunc = funcmap[-1]
+ funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
+
ctx = repo.changectx(opts['rev'])
for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
@@ -92,15 +104,15 @@
ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
continue
- lines = fctx.annotate(follow=opts.get('follow'))
+ lines = fctx.annotate(follow=opts.get('follow'),
+ linenumber=linenumber)
pieces = []
- for o, f in opmap:
- if opts[o]:
- l = [f(n) for n, dummy in lines]
- if l:
- m = max(map(len, l))
- pieces.append(["%*s" % (m, x) for x in l])
+ for f in funcmap:
+ l = [f(n) for n, dummy in lines]
+ if l:
+ m = max(map(len, l))
+ pieces.append(["%*s" % (m, x) for x in l])
if pieces:
for p, l in zip(zip(*pieces), lines):
@@ -130,7 +142,10 @@
The default is the basename of the archive, with suffixes removed.
'''
- node = repo.changectx(opts['rev']).node()
+ ctx = repo.changectx(opts['rev'])
+ if not ctx:
+ raise util.Abort(_('repository has no revisions'))
+ node = ctx.node()
dest = cmdutil.make_filename(repo, dest, node)
if os.path.realpath(dest) == repo.root:
raise util.Abort(_('repository root cannot be destination'))
@@ -835,7 +850,7 @@
'''test Mercurial installation'''
def writetemp(contents):
- (fd, name) = tempfile.mkstemp()
+ (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
f = os.fdopen(fd, "wb")
f.write(contents)
f.close()
@@ -2182,7 +2197,6 @@
Modified files and added files are not removed by default. To
remove them, use the -f/--force option.
"""
- names = []
if not opts['after'] and not pats:
raise util.Abort(_('no files specified'))
files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
@@ -2461,7 +2475,7 @@
parentui = ui.parentui or ui
optlist = ("name templates style address port ipv6"
- " accesslog errorlog webdir_conf")
+ " accesslog errorlog webdir_conf certificate")
for o in optlist.split():
if opts[o]:
parentui.setconfig("web", o, str(opts[o]))
@@ -2644,7 +2658,6 @@
bundle command.
"""
fnames = (fname1,) + fnames
- result = None
for fname in fnames:
if os.path.exists(fname):
f = open(fname, "rb")
@@ -2768,8 +2781,10 @@
('d', 'date', None, _('list the date')),
('n', 'number', None, _('list the revision number (default)')),
('c', 'changeset', None, _('list the changeset')),
+ ('l', 'line-number', None,
+ _('show line number at the first appearance'))
] + walkopts,
- _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
+ _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
"archive":
(archive,
[('', 'no-decode', None, _('do not pass files through decoders')),
@@ -3069,7 +3084,8 @@
('', 'stdio', None, _('for remote clients')),
('t', 'templates', '', _('web templates to use')),
('', 'style', '', _('template style to use')),
- ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
+ ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
+ ('', 'certificate', '', _('SSL certificate file'))],
_('hg serve [OPTION]...')),
"^status|st":
(status,
@@ -3120,6 +3136,8 @@
"version": (version_, [], _('hg version')),
}
+extensions.commandtable = table
+
norepo = ("clone init version help debugancestor debugcomplete debugdata"
" debugindex debugindexdot debugdate debuginstall")
optionalrepo = ("paths serve showconfig")
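
The annotate rework builds its output columns from funcmap and, when -l is given, wraps the last formatter so the first-appearance line number is appended. A simplified sketch of that composition with faked annotation data (only the formatting idea is shown, not the real fctx objects):

    def format_annotation(lines, funcmap):
        # lines: [((rev, lineno), text), ...]; funcmap: one formatter per column
        pieces = []
        for f in funcmap:
            col = [f(meta) for meta, _text in lines]
            width = max(len(x) for x in col)
            pieces.append(['%*s' % (width, x) for x in col])
        return ['%s: %s' % (' '.join(p), text)
                for p, (_meta, text) in zip(zip(*pieces), lines)]

    funcmap = [lambda m: str(m[0])]                        # the -n column
    lastfunc = funcmap[-1]
    funcmap[-1] = lambda m: '%s:%s' % (lastfunc(m), m[1])  # -l wraps the last column

    print('\n'.join(format_annotation([((0, 1), 'a'), ((3, 4), 'b4')], funcmap)))
    # 0:1: a
    # 3:4: b4
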
--- a/mercurial/context.py Sat Jul 21 17:36:45 2007 +0200
+++ b/mercurial/context.py Sat Jul 21 17:37:39 2007 +0200
@@ -184,7 +184,7 @@
def __eq__(self, other):
try:
return (self._path == other._path
- and self._changeid == other._changeid)
+ and self._fileid == other._fileid)
except AttributeError:
return False
@@ -240,14 +240,32 @@
return [filectx(self._repo, self._path, fileid=x,
filelog=self._filelog) for x in c]
- def annotate(self, follow=False):
+ def annotate(self, follow=False, linenumber=None):
'''returns a list of tuples of (ctx, line) for each line
in the file, where ctx is the filectx of the node where
- that line was last changed'''
+ that line was last changed.
+ If the "linenumber" parameter is not None, this returns tuples of
+ ((ctx, linenumber), line) for each line instead, where linenumber
+ is the line number at that line's first appearance in the managed
+ file.
+ To reduce annotation cost, a fixed value (False) is used as the
+ linenumber when the "linenumber" parameter is False.'''
- def decorate(text, rev):
+ def decorate_compat(text, rev):
return ([rev] * len(text.splitlines()), text)
+ def without_linenumber(text, rev):
+ return ([(rev, False)] * len(text.splitlines()), text)
+
+ def with_linenumber(text, rev):
+ size = len(text.splitlines())
+ return ([(rev, i) for i in xrange(1, size + 1)], text)
+
+ decorate = (((linenumber is None) and decorate_compat) or
+ (linenumber and with_linenumber) or
+ without_linenumber)
+
def pair(parent, child):
for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
child[0][b1:b2] = parent[0][a1:a2]
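
The decorate/pair machinery above tags every line with a (ctx, linenumber) pair and then lets lines that are unchanged relative to a parent inherit the parent's tag. A rough illustration of the same propagation, using difflib.SequenceMatcher as a stand-in for Mercurial's bdiff.blocks (an assumption; both yield matching-block ranges):

    import difflib

    def decorate(text, rev):
        # tag each line with (rev, line number at first appearance)
        return [((rev, i + 1), l) for i, l in enumerate(text.splitlines(True))]

    def pair(parent, child):
        # lines unchanged since the parent keep the parent's annotation
        a = [l for _tag, l in parent]
        b = [l for _tag, l in child]
        for blk in difflib.SequenceMatcher(None, a, b).get_matching_blocks():
            child[blk.b:blk.b + blk.size] = parent[blk.a:blk.a + blk.size]
        return child

    p = decorate('a\nb\n', 0)
    c = pair(p, decorate('a\nb\nc\n', 1))
    # c == [((0, 1), 'a\n'), ((0, 2), 'b\n'), ((1, 3), 'c\n')]
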
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/diffhelpers.c Sat Jul 21 17:37:39 2007 +0200
@@ -0,0 +1,150 @@
+/*
+ * diffhelpers.c - helper routines for mpatch
+ *
+ * Copyright 2007 Chris Mason <chris.mason@oracle.com>
+ *
+ * This software may be used and distributed according to the terms
+ * of the GNU General Public License v2, incorporated herein by reference.
+ */
+
+#include <Python.h>
+#include <stdlib.h>
+#include <string.h>
+
+static char diffhelpers_doc[] = "Efficient diff parsing";
+static PyObject *diffhelpers_Error;
+
+
+/* fixup the last lines of a and b when the patch has no newline at eof */
+static void _fix_newline(PyObject *hunk, PyObject *a, PyObject *b)
+{
+ int hunksz = PyList_Size(hunk);
+ PyObject *s = PyList_GET_ITEM(hunk, hunksz-1);
+ char *l = PyString_AS_STRING(s);
+ int sz = PyString_GET_SIZE(s);
+ int alen = PyList_Size(a);
+ int blen = PyList_Size(b);
+ char c = l[0];
+
+ PyObject *hline = PyString_FromStringAndSize(l, sz-1);
+ if (c == ' ' || c == '+') {
+ PyObject *rline = PyString_FromStringAndSize(l+1, sz-2);
+ PyList_SetItem(b, blen-1, rline);
+ }
+ if (c == ' ' || c == '-') {
+ Py_INCREF(hline);
+ PyList_SetItem(a, alen-1, hline);
+ }
+ PyList_SetItem(hunk, hunksz-1, hline);
+}
+
+/* python callable form of _fix_newline */
+static PyObject *
+fix_newline(PyObject *self, PyObject *args)
+{
+ PyObject *hunk, *a, *b;
+ if (!PyArg_ParseTuple(args, "OOO", &hunk, &a, &b))
+ return NULL;
+ _fix_newline(hunk, a, b);
+ return Py_BuildValue("l", 0);
+}
+
+/*
+ * read lines from fp into the hunk. The hunk is parsed into two arrays
+ * a and b. a gets the old state of the text, b gets the new state.
+ * The control char from the hunk is saved when inserting into a, but not b
+ * (for performance while deleting files)
+ */
+static PyObject *
+addlines(PyObject *self, PyObject *args)
+{
+
+ PyObject *fp, *hunk, *a, *b, *x;
+ int i;
+ int lena, lenb;
+ int num;
+ int todoa, todob;
+ char *s, c;
+ PyObject *l;
+ if (!PyArg_ParseTuple(args, "OOiiOO", &fp, &hunk, &lena, &lenb, &a, &b))
+ return NULL;
+
+ while(1) {
+ todoa = lena - PyList_Size(a);
+ todob = lenb - PyList_Size(b);
+ num = todoa > todob ? todoa : todob;
+ if (num == 0)
+ break;
+ for (i = 0 ; i < num ; i++) {
+ x = PyFile_GetLine(fp, 0);
+ s = PyString_AS_STRING(x);
+ c = *s;
+ if (strcmp(s, "\\ No newline at end of file\n") == 0) {
+ _fix_newline(hunk, a, b);
+ continue;
+ }
+ PyList_Append(hunk, x);
+ if (c == '+') {
+ l = PyString_FromString(s + 1);
+ PyList_Append(b, l);
+ Py_DECREF(l);
+ } else if (c == '-') {
+ PyList_Append(a, x);
+ } else {
+ l = PyString_FromString(s + 1);
+ PyList_Append(b, l);
+ Py_DECREF(l);
+ PyList_Append(a, x);
+ }
+ Py_DECREF(x);
+ }
+ }
+ return Py_BuildValue("l", 0);
+}
+
+/*
+ * compare the lines in a with the lines in b. a is assumed to have
+ * a control char at the start of each line, this char is ignored in the
+ * compare
+ */
+static PyObject *
+testhunk(PyObject *self, PyObject *args)
+{
+
+ PyObject *a, *b;
+ long bstart;
+ int alen, blen;
+ int i;
+ char *sa, *sb;
+
+ if (!PyArg_ParseTuple(args, "OOl", &a, &b, &bstart))
+ return NULL;
+ alen = PyList_Size(a);
+ blen = PyList_Size(b);
+ if (alen > blen - bstart) {
+ return Py_BuildValue("l", -1);
+ }
+ for (i = 0 ; i < alen ; i++) {
+ sa = PyString_AS_STRING(PyList_GET_ITEM(a, i));
+ sb = PyString_AS_STRING(PyList_GET_ITEM(b, i + bstart));
+ if (strcmp(sa+1, sb) != 0)
+ return Py_BuildValue("l", -1);
+ }
+ return Py_BuildValue("l", 0);
+}
+
+static PyMethodDef methods[] = {
+ {"addlines", addlines, METH_VARARGS, "add lines to a hunk\n"},
+ {"fix_newline", fix_newline, METH_VARARGS, "fixup newline counters\n"},
+ {"testhunk", testhunk, METH_VARARGS, "test lines in a hunk\n"},
+ {NULL, NULL}
+};
+
+PyMODINIT_FUNC
+initdiffhelpers(void)
+{
+ Py_InitModule3("diffhelpers", methods, diffhelpers_doc);
+ diffhelpers_Error = PyErr_NewException("diffhelpers.diffhelpersError",
+ NULL, NULL);
+}
+
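
diffhelpers.c moves the hot hunk-matching loops out of Python. For reference, a pure-Python sketch of what testhunk does: compare a hunk's old lines, minus the leading control character, against a slice of the target file.

    def testhunk(a, b, bstart):
        # return 0 if hunk lines `a` match `b` at offset `bstart`, else -1;
        # each line of `a` carries a control char (' ' or '-') that is skipped
        if len(a) > len(b) - bstart:
            return -1
        for i, line in enumerate(a):
            if line[1:] != b[bstart + i]:
                return -1
        return 0

    assert testhunk([' x\n', '-y\n'], ['x\n', 'y\n', 'z\n'], 0) == 0
    assert testhunk([' x\n'], ['y\n'], 0) == -1
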
--- a/mercurial/extensions.py Sat Jul 21 17:36:45 2007 +0200
+++ b/mercurial/extensions.py Sat Jul 21 17:37:39 2007 +0200
@@ -6,10 +6,12 @@
# of the GNU General Public License, incorporated herein by reference.
import imp, os
-import commands, hg, util, sys
+import util, sys
from i18n import _
_extensions = {}
+commandtable = {}
+setuphooks = []
def find(name):
'''return module with given extension name'''
@@ -54,13 +56,13 @@
uisetup(ui)
reposetup = getattr(mod, 'reposetup', None)
if reposetup:
- hg.repo_setup_hooks.append(reposetup)
+ setuphooks.append(reposetup)
cmdtable = getattr(mod, 'cmdtable', {})
- overrides = [cmd for cmd in cmdtable if cmd in commands.table]
+ overrides = [cmd for cmd in cmdtable if cmd in commandtable]
if overrides:
ui.warn(_("extension '%s' overrides commands: %s\n")
% (name, " ".join(overrides)))
- commands.table.update(cmdtable)
+ commandtable.update(cmdtable)
def loadall(ui):
result = ui.configitems("extensions")
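
With the command table now handed over through extensions.commandtable and repository hooks collected in extensions.setuphooks, extensions.py no longer imports commands.py or hg.py. From an extension author's point of view nothing changes; a skeleton extension of this era still just exposes cmdtable and reposetup (a hypothetical example, following the option-tuple format used by the command tables in this patch):

    # skeleton.py - hypothetical minimal extension
    from mercurial.i18n import _

    def hello(ui, repo, **opts):
        """print a greeting from the current repository"""
        ui.write(_('hello from %s\n') % repo.root)

    def reposetup(ui, repo):
        # collected via extensions.setuphooks and called for each repository
        ui.note(_('skeleton extension loaded\n'))

    cmdtable = {
        # merged into extensions.commandtable (set by commands.py at import time)
        'hello': (hello, [], _('hg hello')),
    }
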
--- a/mercurial/hg.py Sat Jul 21 17:36:45 2007 +0200
+++ b/mercurial/hg.py Sat Jul 21 17:37:39 2007 +0200
@@ -10,7 +10,7 @@
from repo import *
from i18n import _
import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
-import errno, lock, os, shutil, util, cmdutil
+import errno, lock, os, shutil, util, cmdutil, extensions
import merge as _merge
import verify as _verify
@@ -21,13 +21,11 @@
schemes = {
'bundle': bundlerepo,
'file': _local,
- 'hg': httprepo,
'http': httprepo,
'https': httprepo,
- 'old-http': statichttprepo,
'ssh': sshrepo,
'static-http': statichttprepo,
- }
+}
def _lookup(path):
scheme = 'file'
@@ -50,13 +48,11 @@
return False
return repo.local()
-repo_setup_hooks = []
-
def repository(ui, path='', create=False):
"""return a repository object for the specified path"""
repo = _lookup(path).instance(ui, path, create)
ui = getattr(repo, "ui", ui)
- for hook in repo_setup_hooks:
+ for hook in extensions.setuphooks:
hook(ui, repo)
return repo
--- a/mercurial/hgweb/hgweb_mod.py Sat Jul 21 17:36:45 2007 +0200
+++ b/mercurial/hgweb/hgweb_mod.py Sat Jul 21 17:37:39 2007 +0200
@@ -64,7 +64,7 @@
class hgweb(object):
def __init__(self, repo, name=None):
- if type(repo) == type(""):
+ if isinstance(repo, str):
self.repo = hg.repository(ui.ui(report_untrusted=False), repo)
else:
self.repo = repo
@@ -787,9 +787,17 @@
style = req.form['style'][0]
mapfile = style_map(self.templatepath, style)
+ proto = req.env.get('wsgi.url_scheme')
+ if proto == 'https':
+ default_port = "443"
+ else:
+ proto = 'http'
+ default_port = "80"
+
port = req.env["SERVER_PORT"]
- port = port != "80" and (":" + port) or ""
- urlbase = 'http://%s%s' % (req.env['SERVER_NAME'], port)
+ port = port != default_port and (":" + port) or ""
+ urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
staticurl = self.config("web", "staticurl") or req.url + 'static/'
if not staticurl.endswith('/'):
staticurl += '/'
@@ -1063,7 +1071,7 @@
# replayed
ssl_req = self.configbool('web', 'push_ssl', True)
if ssl_req:
- if not req.env.get('HTTPS'):
+ if req.env.get('wsgi.url_scheme') != 'https':
bail(_('ssl required\n'))
return
proto = 'https'
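
The urlbase fix above derives both the scheme and the default port from wsgi.url_scheme instead of hard-coding http and port 80, so https front ends no longer get an http:// base URL. The same logic as a small pure function over a WSGI-style environ dict:

    def urlbase(env):
        # pick the default port to suppress based on the reported scheme
        if env.get('wsgi.url_scheme') == 'https':
            proto, default_port = 'https', '443'
        else:
            proto, default_port = 'http', '80'
        port = env.get('SERVER_PORT', default_port)
        port = ':' + port if port != default_port else ''
        return '%s://%s%s' % (proto, env['SERVER_NAME'], port)

    assert urlbase({'wsgi.url_scheme': 'https', 'SERVER_NAME': 'hg.example.com',
                    'SERVER_PORT': '443'}) == 'https://hg.example.com'
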
--- a/mercurial/hgweb/hgwebdir_mod.py Sat Jul 21 17:36:45 2007 +0200
+++ b/mercurial/hgweb/hgwebdir_mod.py Sat Jul 21 17:37:39 2007 +0200
@@ -90,8 +90,12 @@
url = req.env['REQUEST_URI'].split('?')[0]
if not url.endswith('/'):
url += '/'
+ pathinfo = req.env.get('PATH_INFO', '').strip('/') + '/'
+ base = url[:len(url) - len(pathinfo)]
+ if not base.endswith('/'):
+ base += '/'
- staticurl = config('web', 'staticurl') or url + 'static/'
+ staticurl = config('web', 'staticurl') or base + 'static/'
if not staticurl.endswith('/'):
staticurl += '/'
@@ -118,7 +122,7 @@
yield {"type" : i[0], "extension": i[1],
"node": nodeid, "url": url}
- def entries(sortcolumn="", descending=False, **map):
+ def entries(sortcolumn="", descending=False, subdir="", **map):
def sessionvars(**map):
fields = []
if req.form.has_key('style'):
@@ -134,6 +138,10 @@
rows = []
parity = paritygen(self.stripecount)
for name, path in self.repos:
+ if not name.startswith(subdir):
+ continue
+ name = name[len(subdir):]
+
u = ui.ui(parentui=parentui)
try:
u.readconfig(os.path.join(path, '.hg', 'hgrc'))
@@ -185,6 +193,25 @@
row['parity'] = parity.next()
yield row
+ def makeindex(req, subdir=""):
+ sortable = ["name", "description", "contact", "lastchange"]
+ sortcolumn, descending = self.repos_sorted
+ if req.form.has_key('sort'):
+ sortcolumn = req.form['sort'][0]
+ descending = sortcolumn.startswith('-')
+ if descending:
+ sortcolumn = sortcolumn[1:]
+ if sortcolumn not in sortable:
+ sortcolumn = ""
+
+ sort = [("sort_%s" % column,
+ "%s%s" % ((not descending and column == sortcolumn)
+ and "-" or "", column))
+ for column in sortable]
+ req.write(tmpl("index", entries=entries, subdir=subdir,
+ sortcolumn=sortcolumn, descending=descending,
+ **dict(sort)))
+
try:
virtual = req.env.get("PATH_INFO", "").strip('/')
if virtual.startswith('static/'):
@@ -193,25 +220,32 @@
req.write(staticfile(static, fname, req) or
tmpl('error', error='%r not found' % fname))
elif virtual:
+ repos = dict(self.repos)
while virtual:
- real = dict(self.repos).get(virtual)
+ real = repos.get(virtual)
if real:
- break
+ req.env['REPO_NAME'] = virtual
+ try:
+ repo = hg.repository(parentui, real)
+ hgweb(repo).run_wsgi(req)
+ except IOError, inst:
+ req.write(tmpl("error", error=inst.strerror))
+ except hg.RepoError, inst:
+ req.write(tmpl("error", error=str(inst)))
+ return
+
+ # browse subdirectories
+ subdir = virtual + '/'
+ if [r for r in repos if r.startswith(subdir)]:
+ makeindex(req, subdir)
+ return
+
up = virtual.rfind('/')
if up < 0:
break
virtual = virtual[:up]
- if real:
- req.env['REPO_NAME'] = virtual
- try:
- repo = hg.repository(parentui, real)
- hgweb(repo).run_wsgi(req)
- except IOError, inst:
- req.write(tmpl("error", error=inst.strerror))
- except hg.RepoError, inst:
- req.write(tmpl("error", error=str(inst)))
- else:
- req.write(tmpl("notfound", repo=virtual))
+
+ req.write(tmpl("notfound", repo=virtual))
else:
if req.form.has_key('static'):
static = os.path.join(templater.templatepath(), "static")
@@ -219,22 +253,6 @@
req.write(staticfile(static, fname, req)
or tmpl("error", error="%r not found" % fname))
else:
- sortable = ["name", "description", "contact", "lastchange"]
- sortcolumn, descending = self.repos_sorted
- if req.form.has_key('sort'):
- sortcolumn = req.form['sort'][0]
- descending = sortcolumn.startswith('-')
- if descending:
- sortcolumn = sortcolumn[1:]
- if sortcolumn not in sortable:
- sortcolumn = ""
-
- sort = [("sort_%s" % column,
- "%s%s" % ((not descending and column == sortcolumn)
- and "-" or "", column))
- for column in sortable]
- req.write(tmpl("index", entries=entries,
- sortcolumn=sortcolumn, descending=descending,
- **dict(sort)))
+ makeindex(req)
finally:
tmpl = None
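
makeindex and entries above gain a subdir argument so hgwebdir can render an index page for a subdirectory of repositories; the filtering is a plain prefix match with the prefix stripped from the displayed name. Roughly:

    def filter_repos(repos, subdir=''):
        # repos: (name, path) pairs; keep entries under subdir, strip the prefix
        return [(name[len(subdir):], path)
                for name, path in repos if name.startswith(subdir)]

    repos = [('lib/a', '/srv/hg/lib/a'), ('lib/b', '/srv/hg/lib/b'),
             ('tools', '/srv/hg/tools')]
    assert filter_repos(repos, 'lib/') == [('a', '/srv/hg/lib/a'),
                                           ('b', '/srv/hg/lib/b')]
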
--- a/mercurial/hgweb/server.py Sat Jul 21 17:36:45 2007 +0200
+++ b/mercurial/hgweb/server.py Sat Jul 21 17:37:39 2007 +0200
@@ -37,6 +37,9 @@
self.handler.log_error("HG error: %s", msg)
class _hgwebhandler(object, BaseHTTPServer.BaseHTTPRequestHandler):
+
+ url_scheme = 'http'
+
def __init__(self, *args, **kargs):
self.protocol_version = 'HTTP/1.1'
BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs)
@@ -53,13 +56,16 @@
self.log_date_time_string(),
format % args))
+ def do_write(self):
+ try:
+ self.do_hgweb()
+ except socket.error, inst:
+ if inst[0] != errno.EPIPE:
+ raise
+
def do_POST(self):
try:
- try:
- self.do_hgweb()
- except socket.error, inst:
- if inst[0] != errno.EPIPE:
- raise
+ self.do_write()
except StandardError, inst:
self._start_response("500 Internal Server Error", [])
self._write("Internal Server Error")
@@ -101,7 +107,7 @@
env[hkey] = hval
env['SERVER_PROTOCOL'] = self.request_version
env['wsgi.version'] = (1, 0)
- env['wsgi.url_scheme'] = 'http'
+ env['wsgi.url_scheme'] = self.url_scheme
env['wsgi.input'] = self.rfile
env['wsgi.errors'] = _error_logger(self)
env['wsgi.multithread'] = isinstance(self.server,
@@ -164,6 +170,31 @@
self.wfile.write(data)
self.wfile.flush()
+class _shgwebhandler(_hgwebhandler):
+
+ url_scheme = 'https'
+
+ def setup(self):
+ self.connection = self.request
+ self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
+ self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)
+
+ def do_write(self):
+ from OpenSSL.SSL import SysCallError
+ try:
+ super(_shgwebhandler, self).do_write()
+ except SysCallError, inst:
+ if inst.args[0] != errno.EPIPE:
+ raise
+
+ def handle_one_request(self):
+ from OpenSSL.SSL import SysCallError, ZeroReturnError
+ try:
+ super(_shgwebhandler, self).handle_one_request()
+ except (SysCallError, ZeroReturnError):
+ self.close_connection = True
+ pass
+
def create_server(ui, repo):
use_threads = True
@@ -176,6 +207,7 @@
port = int(repo.ui.config("web", "port", 8000))
use_ipv6 = repo.ui.configbool("web", "ipv6")
webdir_conf = repo.ui.config("web", "webdir_conf")
+ ssl_cert = repo.ui.config("web", "certificate")
accesslog = openlog(repo.ui.config("web", "accesslog", "-"), sys.stdout)
errorlog = openlog(repo.ui.config("web", "errorlog", "-"), sys.stderr)
@@ -222,6 +254,19 @@
self.addr, self.port = addr, port
+ if ssl_cert:
+ try:
+ from OpenSSL import SSL
+ ctx = SSL.Context(SSL.SSLv23_METHOD)
+ except ImportError:
+ raise util.Abort("SSL support is unavailable")
+ ctx.use_privatekey_file(ssl_cert)
+ ctx.use_certificate_file(ssl_cert)
+ sock = socket.socket(self.address_family, self.socket_type)
+ self.socket = SSL.Connection(ctx, sock)
+ self.server_bind()
+ self.server_activate()
+
class IPv6HTTPServer(MercurialHTTPServer):
address_family = getattr(socket, 'AF_INET6', None)
@@ -230,10 +275,15 @@
raise hg.RepoError(_('IPv6 not available on this system'))
super(IPv6HTTPServer, self).__init__(*args, **kwargs)
+ if ssl_cert:
+ handler = _shgwebhandler
+ else:
+ handler = _hgwebhandler
+
try:
if use_ipv6:
- return IPv6HTTPServer((address, port), _hgwebhandler)
+ return IPv6HTTPServer((address, port), handler)
else:
- return MercurialHTTPServer((address, port), _hgwebhandler)
+ return MercurialHTTPServer((address, port), handler)
except socket.error, inst:
raise util.Abort(_('cannot start server: %s') % inst.args[1])
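
When web.certificate is set, the server above wraps its listening socket in a pyOpenSSL SSL.Connection and switches the handler's url_scheme to https. As a rough feel for the same idea without pyOpenSSL, here is a hedged sketch using the standard-library ssl module (a modern stand-in, not what the patch does; 'cert.pem' is a placeholder for a combined key-and-certificate file):

    import http.server
    import ssl

    class Handler(http.server.SimpleHTTPRequestHandler):
        # an hgweb handler would additionally report wsgi.url_scheme = 'https'
        pass

    httpd = http.server.HTTPServer(('localhost', 8443), Handler)
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    ctx.load_cert_chain('cert.pem')        # placeholder path
    httpd.socket = ctx.wrap_socket(httpd.socket, server_side=True)
    # httpd.serve_forever()
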
--- a/mercurial/hgweb/wsgicgi.py Sat Jul 21 17:36:45 2007 +0200
+++ b/mercurial/hgweb/wsgicgi.py Sat Jul 21 17:37:39 2007 +0200
@@ -23,7 +23,7 @@
environ['wsgi.multiprocess'] = True
environ['wsgi.run_once'] = True
- if environ.get('HTTPS','off') in ('on','1'):
+ if environ.get('HTTPS','off').lower() in ('on','1','yes'):
environ['wsgi.url_scheme'] = 'https'
else:
environ['wsgi.url_scheme'] = 'http'
--- a/mercurial/httprepo.py Sat Jul 21 17:36:45 2007 +0200
+++ b/mercurial/httprepo.py Sat Jul 21 17:37:39 2007 +0200
@@ -409,9 +409,6 @@
def instance(ui, path, create):
if create:
raise util.Abort(_('cannot create new http repository'))
- if path.startswith('hg:'):
- ui.warn(_("hg:// syntax is deprecated, please use http:// instead\n"))
- path = 'http:' + path[3:]
if path.startswith('https:'):
return httpsrepository(ui, path)
return httprepository(ui, path)
--- a/mercurial/localrepo.py Sat Jul 21 17:36:45 2007 +0200
+++ b/mercurial/localrepo.py Sat Jul 21 17:37:39 2007 +0200
@@ -109,7 +109,8 @@
tag_disallowed = ':\r\n'
- def _tag(self, name, node, message, local, user, date, parent=None):
+ def _tag(self, name, node, message, local, user, date, parent=None,
+ extra={}):
use_dirstate = parent is None
for c in self.tag_disallowed:
@@ -158,7 +159,8 @@
if use_dirstate and self.dirstate.state('.hgtags') == '?':
self.add(['.hgtags'])
- tagnode = self.commit(['.hgtags'], message, user, date, p1=parent)
+ tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
+ extra=extra)
self.hook('tag', node=hex(node), tag=name, local=local)
--- a/mercurial/lock.py Sat Jul 21 17:36:45 2007 +0200
+++ b/mercurial/lock.py Sat Jul 21 17:37:39 2007 +0200
@@ -29,14 +29,13 @@
# old-style lock: symlink to pid
# new-style lock: symlink to hostname:pid
+ _host = None
+
def __init__(self, file, timeout=-1, releasefn=None, desc=None):
self.f = file
self.held = 0
self.timeout = timeout
self.releasefn = releasefn
- self.id = None
- self.host = None
- self.pid = None
self.desc = desc
self.lock()
@@ -59,13 +58,12 @@
inst.locker)
def trylock(self):
- if self.id is None:
- self.host = socket.gethostname()
- self.pid = os.getpid()
- self.id = '%s:%s' % (self.host, self.pid)
+ if lock._host is None:
+ lock._host = socket.gethostname()
+ lockname = '%s:%s' % (lock._host, os.getpid())
while not self.held:
try:
- util.makelock(self.id, self.f)
+ util.makelock(lockname, self.f)
self.held = 1
except (OSError, IOError), why:
if why.errno == errno.EEXIST:
@@ -93,7 +91,7 @@
host, pid = locker.split(":", 1)
except ValueError:
return locker
- if host != self.host:
+ if host != lock._host:
return locker
try:
pid = int(pid)
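
The lock changes cache the hostname once per process (lock._host) and record the lock holder as host:pid, which the stale-lock check above parses back apart. A simplified sketch of that symlink-based scheme (assuming util.makelock amounts to os.symlink on POSIX):

    import errno
    import os
    import socket

    _host = None                      # cached per process, like lock._host

    def trylock(lockfile):
        global _host
        if _host is None:
            _host = socket.gethostname()
        lockname = '%s:%d' % (_host, os.getpid())
        try:
            os.symlink(lockname, lockfile)   # atomic; fails if the lock exists
            return True
        except OSError as err:
            if err.errno == errno.EEXIST:
                return False                 # somebody else holds the lock
            raise
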
--- a/mercurial/patch.py Sat Jul 21 17:36:45 2007 +0200
+++ b/mercurial/patch.py Sat Jul 21 17:37:39 2007 +0200
@@ -1,16 +1,23 @@
# patch.py - patch file parsing routines
#
# Copyright 2006 Brendan Cully <brendan@kublai.com>
+# Copyright 2007 Chris Mason <chris.mason@oracle.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
from i18n import _
from node import *
-import base85, cmdutil, mdiff, util, context, revlog
+import base85, cmdutil, mdiff, util, context, revlog, diffhelpers
import cStringIO, email.Parser, os, popen2, re, sha
import sys, tempfile, zlib
+class PatchError(Exception):
+ pass
+
+class NoHunks(PatchError):
+ pass
+
# helper functions
def copyfile(src, dst, basedir=None):
@@ -50,7 +57,7 @@
try:
msg = email.Parser.Parser().parse(fileobj)
- message = msg['Subject']
+ subject = msg['Subject']
user = msg['From']
# should try to parse msg['Date']
date = None
@@ -58,18 +65,18 @@
branch = None
parents = []
- if message:
- if message.startswith('[PATCH'):
- pend = message.find(']')
+ if subject:
+ if subject.startswith('[PATCH'):
+ pend = subject.find(']')
if pend >= 0:
- message = message[pend+1:].lstrip()
- message = message.replace('\n\t', ' ')
- ui.debug('Subject: %s\n' % message)
+ subject = subject[pend+1:].lstrip()
+ subject = subject.replace('\n\t', ' ')
+ ui.debug('Subject: %s\n' % subject)
if user:
ui.debug('From: %s\n' % user)
diffs_seen = 0
ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
-
+ message = ''
for part in msg.walk():
content_type = part.get_content_type()
ui.debug('Content-Type: %s\n' % content_type)
@@ -84,9 +91,6 @@
ui.debug(_('found patch at byte %d\n') % m.start(0))
diffs_seen += 1
cfp = cStringIO.StringIO()
- if message:
- cfp.write(message)
- cfp.write('\n')
for line in payload[:m.start(0)].splitlines():
if line.startswith('# HG changeset patch'):
ui.debug(_('patch generated by hg export\n'))
@@ -94,6 +98,7 @@
# drop earlier commit message content
cfp.seek(0)
cfp.truncate()
+ subject = None
elif hgpatch:
if line.startswith('# User '):
user = line[7:]
@@ -123,6 +128,8 @@
os.unlink(tmpname)
raise
+ if subject and not message.startswith(subject):
+ message = '%s\n%s' % (subject, message)
tmpfp.close()
if not diffs_seen:
os.unlink(tmpname)
@@ -135,7 +142,7 @@
GP_FILTER = 1 << 1 # there's some copy/rename operation
GP_BINARY = 1 << 2 # there's a binary patch
-def readgitpatch(patchname):
+def readgitpatch(fp, firstline):
"""extract git-style metadata about patches from <patchname>"""
class gitpatch:
"op is one of ADD, DELETE, RENAME, MODIFY or COPY"
@@ -148,16 +155,20 @@
self.lineno = 0
self.binary = False
+ def reader(fp, firstline):
+ yield firstline
+ for line in fp:
+ yield line
+
# Filter patch for git information
gitre = re.compile('diff --git a/(.*) b/(.*)')
- pf = file(patchname)
gp = None
gitpatches = []
# Can have a git patch with only metadata, causing patch to complain
dopatch = 0
lineno = 0
- for line in pf:
+ for line in reader(fp, firstline):
lineno += 1
if line.startswith('diff --git'):
m = gitre.match(line)
@@ -204,157 +215,790 @@
return (dopatch, gitpatches)
-def dogitpatch(patchname, gitpatches, cwd=None):
- """Preprocess git patch so that vanilla patch can handle it"""
- def extractbin(fp):
- i = [0] # yuck
- def readline():
- i[0] += 1
- return fp.readline().rstrip()
- line = readline()
+def patch(patchname, ui, strip=1, cwd=None, files={}):
+ """apply <patchname> to the working directory.
+ returns whether patch was applied with fuzz factor."""
+ patcher = ui.config('ui', 'patch')
+ args = []
+ try:
+ if patcher:
+ return externalpatch(patcher, args, patchname, ui, strip, cwd,
+ files)
+ else:
+ try:
+ return internalpatch(patchname, ui, strip, cwd, files)
+ except NoHunks:
+ patcher = util.find_exe('gpatch') or util.find_exe('patch')
+ ui.debug('no valid hunks found; trying with %r instead\n' %
+ patcher)
+ if util.needbinarypatch():
+ args.append('--binary')
+ return externalpatch(patcher, args, patchname, ui, strip, cwd,
+ files)
+ except PatchError, err:
+ s = str(err)
+ if s:
+ raise util.Abort(s)
+ else:
+ raise util.Abort(_('patch failed to apply'))
+
+def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
+ """use <patcher> to apply <patchname> to the working directory.
+ returns whether patch was applied with fuzz factor."""
+
+ fuzz = False
+ if cwd:
+ args.append('-d %s' % util.shellquote(cwd))
+ fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
+ util.shellquote(patchname)))
+
+ for line in fp:
+ line = line.rstrip()
+ ui.note(line + '\n')
+ if line.startswith('patching file '):
+ pf = util.parse_patch_output(line)
+ printed_file = False
+ files.setdefault(pf, (None, None))
+ elif line.find('with fuzz') >= 0:
+ fuzz = True
+ if not printed_file:
+ ui.warn(pf + '\n')
+ printed_file = True
+ ui.warn(line + '\n')
+ elif line.find('saving rejects to file') >= 0:
+ ui.warn(line + '\n')
+ elif line.find('FAILED') >= 0:
+ if not printed_file:
+ ui.warn(pf + '\n')
+ printed_file = True
+ ui.warn(line + '\n')
+ code = fp.close()
+ if code:
+ raise PatchError(_("patch command failed: %s") %
+ util.explain_exit(code)[0])
+ return fuzz
+
+def internalpatch(patchname, ui, strip, cwd, files):
+ """use builtin patch to apply <patchname> to the working directory.
+ returns whether patch was applied with fuzz factor."""
+ fp = file(patchname, 'rb')
+ if cwd:
+ curdir = os.getcwd()
+ os.chdir(cwd)
+ try:
+ ret = applydiff(ui, fp, files, strip=strip)
+ finally:
+ if cwd:
+ os.chdir(curdir)
+ if ret < 0:
+ raise PatchError
+ return ret > 0
+
+# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
+unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
+contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
+
+class patchfile:
+ def __init__(self, ui, fname):
+ self.fname = fname
+ self.ui = ui
+ try:
+ fp = file(fname, 'rb')
+ self.lines = fp.readlines()
+ self.exists = True
+ except IOError:
+ dirname = os.path.dirname(fname)
+ if dirname and not os.path.isdir(dirname):
+ dirs = dirname.split(os.path.sep)
+ d = ""
+ for x in dirs:
+ d = os.path.join(d, x)
+ if not os.path.isdir(d):
+ os.mkdir(d)
+ self.lines = []
+ self.exists = False
+
+ self.hash = {}
+ self.dirty = 0
+ self.offset = 0
+ self.rej = []
+ self.fileprinted = False
+ self.printfile(False)
+ self.hunks = 0
+
+ def printfile(self, warn):
+ if self.fileprinted:
+ return
+ if warn or self.ui.verbose:
+ self.fileprinted = True
+ s = _("patching file %s\n") % self.fname
+ if warn:
+ self.ui.warn(s)
+ else:
+ self.ui.note(s)
+
+
+ def findlines(self, l, linenum):
+ # looks through the hash and finds candidate lines. The
+ # result is a list of line numbers sorted based on distance
+ # from linenum
+ def sorter(a, b):
+ vala = abs(a - linenum)
+ valb = abs(b - linenum)
+ return cmp(vala, valb)
+
+ try:
+ cand = self.hash[l]
+ except:
+ return []
+
+ if len(cand) > 1:
+ # resort our list of potentials forward then back.
+ cand.sort(cmp=sorter)
+ return cand
+
+ def hashlines(self):
+ self.hash = {}
+ for x in xrange(len(self.lines)):
+ s = self.lines[x]
+ self.hash.setdefault(s, []).append(x)
+
+ def write_rej(self):
+ # our rejects are a little different from patch(1). This always
+ # creates rejects in the same form as the original patch. A file
+ # header is inserted so that you can run the reject through patch again
+ # without having to type the filename.
+
+ if not self.rej:
+ return
+ if self.hunks != 1:
+ hunkstr = "s"
+ else:
+ hunkstr = ""
+
+ fname = self.fname + ".rej"
+ self.ui.warn(
+ _("%d out of %d hunk%s FAILED -- saving rejects to file %s\n") %
+ (len(self.rej), self.hunks, hunkstr, fname))
+ try: os.unlink(fname)
+ except:
+ pass
+ fp = file(fname, 'wb')
+ base = os.path.basename(self.fname)
+ fp.write("--- %s\n+++ %s\n" % (base, base))
+ for x in self.rej:
+ for l in x.hunk:
+ fp.write(l)
+ if l[-1] != '\n':
+ fp.write("\n\ No newline at end of file\n")
+
+ def write(self, dest=None):
+ if self.dirty:
+ if not dest:
+ dest = self.fname
+ st = None
+ try:
+ st = os.lstat(dest)
+ if st.st_nlink > 1:
+ os.unlink(dest)
+ except: pass
+ fp = file(dest, 'wb')
+ if st:
+ os.chmod(dest, st.st_mode)
+ fp.writelines(self.lines)
+ fp.close()
+
+ def close(self):
+ self.write()
+ self.write_rej()
+
+ def apply(self, h, reverse):
+ if not h.complete():
+ raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
+ (h.number, h.desc, len(h.a), h.lena, len(h.b),
+ h.lenb))
+
+ self.hunks += 1
+ if reverse:
+ h.reverse()
+
+ if self.exists and h.createfile():
+ self.ui.warn(_("file %s already exists\n") % self.fname)
+ self.rej.append(h)
+ return -1
+
+ if isinstance(h, binhunk):
+ if h.rmfile():
+ os.unlink(self.fname)
+ else:
+ self.lines[:] = h.new()
+ self.offset += len(h.new())
+ self.dirty = 1
+ return 0
+
+ # fast case first, no offsets, no fuzz
+ old = h.old()
+ # patch starts counting at 1 unless we are adding the file
+ if h.starta == 0:
+ start = 0
+ else:
+ start = h.starta + self.offset - 1
+ orig_start = start
+ if diffhelpers.testhunk(old, self.lines, start) == 0:
+ if h.rmfile():
+ os.unlink(self.fname)
+ else:
+ self.lines[start : start + h.lena] = h.new()
+ self.offset += h.lenb - h.lena
+ self.dirty = 1
+ return 0
+
+ # ok, we couldn't match the hunk. Let's look for offsets and fuzz it.
+ self.hashlines()
+ if h.hunk[-1][0] != ' ':
+ # if the hunk tried to put something at the bottom of the file
+ # override the start line and use eof here
+ search_start = len(self.lines)
+ else:
+ search_start = orig_start
+
+ for fuzzlen in xrange(3):
+ for toponly in [ True, False ]:
+ old = h.old(fuzzlen, toponly)
+
+ cand = self.findlines(old[0][1:], search_start)
+ for l in cand:
+ if diffhelpers.testhunk(old, self.lines, l) == 0:
+ newlines = h.new(fuzzlen, toponly)
+ self.lines[l : l + len(old)] = newlines
+ self.offset += len(newlines) - len(old)
+ self.dirty = 1
+ if fuzzlen:
+ fuzzstr = "with fuzz %d " % fuzzlen
+ f = self.ui.warn
+ self.printfile(True)
+ else:
+ fuzzstr = ""
+ f = self.ui.note
+ offset = l - orig_start - fuzzlen
+ if offset == 1:
+ linestr = "line"
+ else:
+ linestr = "lines"
+ f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
+ (h.number, l+1, fuzzstr, offset, linestr))
+ return fuzzlen
+ self.printfile(True)
+ self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
+ self.rej.append(h)
+ return -1
+
+class hunk:
+ def __init__(self, desc, num, lr, context):
+ self.number = num
+ self.desc = desc
+ self.hunk = [ desc ]
+ self.a = []
+ self.b = []
+ if context:
+ self.read_context_hunk(lr)
+ else:
+ self.read_unified_hunk(lr)
+
+ def read_unified_hunk(self, lr):
+ m = unidesc.match(self.desc)
+ if not m:
+ raise PatchError(_("bad hunk #%d") % self.number)
+ self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
+ if self.lena == None:
+ self.lena = 1
+ else:
+ self.lena = int(self.lena)
+ if self.lenb == None:
+ self.lenb = 1
+ else:
+ self.lenb = int(self.lenb)
+ self.starta = int(self.starta)
+ self.startb = int(self.startb)
+ diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
+ # if we hit eof before finishing out the hunk, the last line will
+ # be zero length. Let's try to fix it up.
+ while len(self.hunk[-1]) == 0:
+ del self.hunk[-1]
+ del self.a[-1]
+ del self.b[-1]
+ self.lena -= 1
+ self.lenb -= 1
+
+ def read_context_hunk(self, lr):
+ self.desc = lr.readline()
+ m = contextdesc.match(self.desc)
+ if not m:
+ raise PatchError(_("bad hunk #%d") % self.number)
+ foo, self.starta, foo2, aend, foo3 = m.groups()
+ self.starta = int(self.starta)
+ if aend == None:
+ aend = self.starta
+ self.lena = int(aend) - self.starta
+ if self.starta:
+ self.lena += 1
+ for x in xrange(self.lena):
+ l = lr.readline()
+ if l.startswith('---'):
+ lr.push(l)
+ break
+ s = l[2:]
+ if l.startswith('- ') or l.startswith('! '):
+ u = '-' + s
+ elif l.startswith(' '):
+ u = ' ' + s
+ else:
+ raise PatchError(_("bad hunk #%d old text line %d") %
+ (self.number, x))
+ self.a.append(u)
+ self.hunk.append(u)
+
+ l = lr.readline()
+ if l.startswith('\ '):
+ s = self.a[-1][:-1]
+ self.a[-1] = s
+ self.hunk[-1] = s
+ l = lr.readline()
+ m = contextdesc.match(l)
+ if not m:
+ raise PatchError(_("bad hunk #%d") % self.number)
+ foo, self.startb, foo2, bend, foo3 = m.groups()
+ self.startb = int(self.startb)
+ if bend == None:
+ bend = self.startb
+ self.lenb = int(bend) - self.startb
+ if self.startb:
+ self.lenb += 1
+ hunki = 1
+ for x in xrange(self.lenb):
+ l = lr.readline()
+ if l.startswith('\ '):
+ s = self.b[-1][:-1]
+ self.b[-1] = s
+ self.hunk[hunki-1] = s
+ continue
+ if not l:
+ lr.push(l)
+ break
+ s = l[2:]
+ if l.startswith('+ ') or l.startswith('! '):
+ u = '+' + s
+ elif l.startswith(' '):
+ u = ' ' + s
+ elif len(self.b) == 0:
+ # this can happen when the hunk does not add any lines
+ lr.push(l)
+ break
+ else:
+ raise PatchError(_("bad hunk #%d old text line %d") %
+ (self.number, x))
+ self.b.append(s)
+ while True:
+ if hunki >= len(self.hunk):
+ h = ""
+ else:
+ h = self.hunk[hunki]
+ hunki += 1
+ if h == u:
+ break
+ elif h.startswith('-'):
+ continue
+ else:
+ self.hunk.insert(hunki-1, u)
+ break
+
+ if not self.a:
+ # this happens when lines were only added to the hunk
+ for x in self.hunk:
+ if x.startswith('-') or x.startswith(' '):
+ self.a.append(x)
+ if not self.b:
+ # this happens when lines were only deleted from the hunk
+ for x in self.hunk:
+ if x.startswith('+') or x.startswith(' '):
+ self.b.append(x[1:])
+ # @@ -start,len +start,len @@
+ self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
+ self.startb, self.lenb)
+ self.hunk[0] = self.desc
+
+ def reverse(self):
+ origlena = self.lena
+ origstarta = self.starta
+ self.lena = self.lenb
+ self.starta = self.startb
+ self.lenb = origlena
+ self.startb = origstarta
+ self.a = []
+ self.b = []
+ # self.hunk[0] is the @@ description
+ for x in xrange(1, len(self.hunk)):
+ o = self.hunk[x]
+ if o.startswith('-'):
+ n = '+' + o[1:]
+ self.b.append(o[1:])
+ elif o.startswith('+'):
+ n = '-' + o[1:]
+ self.a.append(n)
+ else:
+ n = o
+ self.b.append(o[1:])
+ self.a.append(o)
+ self.hunk[x] = o
+
+ def fix_newline(self):
+ diffhelpers.fix_newline(self.hunk, self.a, self.b)
+
+ def complete(self):
+ return len(self.a) == self.lena and len(self.b) == self.lenb
+
+ def createfile(self):
+ return self.starta == 0 and self.lena == 0
+
+ def rmfile(self):
+ return self.startb == 0 and self.lenb == 0
+
+ def fuzzit(self, l, fuzz, toponly):
+ # this removes context lines from the top and bottom of list 'l'. It
+ # checks the hunk to make sure only context lines are removed, and then
+ # returns a new shortened list of lines.
+ fuzz = min(fuzz, len(l)-1)
+ if fuzz:
+ top = 0
+ bot = 0
+ hlen = len(self.hunk)
+ for x in xrange(hlen-1):
+ # the hunk starts with the @@ line, so use x+1
+ if self.hunk[x+1][0] == ' ':
+ top += 1
+ else:
+ break
+ if not toponly:
+ for x in xrange(hlen-1):
+ if self.hunk[hlen-bot-1][0] == ' ':
+ bot += 1
+ else:
+ break
+
+ # top and bot now count context in the hunk
+ # adjust them if either one is short
+ context = max(top, bot, 3)
+ if bot < context:
+ bot = max(0, fuzz - (context - bot))
+ else:
+ bot = min(fuzz, bot)
+ if top < context:
+ top = max(0, fuzz - (context - top))
+ else:
+ top = min(fuzz, top)
+
+ return l[top:len(l)-bot]
+ return l
+
+ def old(self, fuzz=0, toponly=False):
+ return self.fuzzit(self.a, fuzz, toponly)
+
+ def newctrl(self):
+ res = []
+ for x in self.hunk:
+ c = x[0]
+ if c == ' ' or c == '+':
+ res.append(x)
+ return res
+
+ def new(self, fuzz=0, toponly=False):
+ return self.fuzzit(self.b, fuzz, toponly)
+
+class binhunk:
+ 'A binary patch file. Only understands literals so far.'
+ def __init__(self, gitpatch):
+ self.gitpatch = gitpatch
+ self.text = None
+ self.hunk = ['GIT binary patch\n']
+
+ def createfile(self):
+ return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
+
+ def rmfile(self):
+ return self.gitpatch.op == 'DELETE'
+
+ def complete(self):
+ return self.text is not None
+
+ def new(self):
+ return [self.text]
+
+ def extract(self, fp):
+ line = fp.readline()
+ self.hunk.append(line)
while line and not line.startswith('literal '):
- line = readline()
+ line = fp.readline()
+ self.hunk.append(line)
if not line:
- return None, i[0]
- size = int(line[8:])
+ raise PatchError(_('could not extract binary patch'))
+ size = int(line[8:].rstrip())
dec = []
- line = readline()
- while line:
+ line = fp.readline()
+ self.hunk.append(line)
+ while len(line) > 1:
l = line[0]
if l <= 'Z' and l >= 'A':
l = ord(l) - ord('A') + 1
else:
l = ord(l) - ord('a') + 27
- dec.append(base85.b85decode(line[1:])[:l])
- line = readline()
+ dec.append(base85.b85decode(line[1:-1])[:l])
+ line = fp.readline()
+ self.hunk.append(line)
text = zlib.decompress(''.join(dec))
if len(text) != size:
- raise util.Abort(_('binary patch is %d bytes, not %d') %
- (len(text), size))
- return text, i[0]
+ raise PatchError(_('binary patch is %d bytes, not %d') %
+ (len(text), size))
+ self.text = text
- pf = file(patchname)
- pfline = 1
-
- fd, patchname = tempfile.mkstemp(prefix='hg-patch-')
- tmpfp = os.fdopen(fd, 'w')
+def parsefilename(str):
+ # --- filename \t|space stuff
+ s = str[4:]
+ i = s.find('\t')
+ if i < 0:
+ i = s.find(' ')
+ if i < 0:
+ return s
+ return s[:i]
- try:
- for i in xrange(len(gitpatches)):
- p = gitpatches[i]
- if not p.copymod and not p.binary:
- continue
-
- # rewrite patch hunk
- while pfline < p.lineno:
- tmpfp.write(pf.readline())
- pfline += 1
+def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
+ def pathstrip(path, count=1):
+ pathlen = len(path)
+ i = 0
+ if count == 0:
+ return path.rstrip()
+ while count > 0:
+ i = path.find('/', i)
+ if i == -1:
+ raise PatchError(_("unable to strip away %d dirs from %s") %
+ (count, path))
+ i += 1
+ # consume '//' in the path
+ while i < pathlen - 1 and path[i] == '/':
+ i += 1
+ count -= 1
+ return path[i:].rstrip()
- if p.binary:
- text, delta = extractbin(pf)
- if not text:
- raise util.Abort(_('binary patch extraction failed'))
- pfline += delta
- if not cwd:
- cwd = os.getcwd()
- absdst = os.path.join(cwd, p.path)
- basedir = os.path.dirname(absdst)
- if not os.path.isdir(basedir):
- os.makedirs(basedir)
- out = file(absdst, 'wb')
- out.write(text)
- out.close()
- elif p.copymod:
- copyfile(p.oldpath, p.path, basedir=cwd)
- tmpfp.write('diff --git a/%s b/%s\n' % (p.path, p.path))
- line = pf.readline()
- pfline += 1
- while not line.startswith('--- a/'):
- tmpfp.write(line)
- line = pf.readline()
- pfline += 1
- tmpfp.write('--- a/%s\n' % p.path)
+ nulla = afile_orig == "/dev/null"
+ nullb = bfile_orig == "/dev/null"
+ afile = pathstrip(afile_orig, strip)
+ gooda = os.path.exists(afile) and not nulla
+ bfile = pathstrip(bfile_orig, strip)
+ if afile == bfile:
+ goodb = gooda
+ else:
+ goodb = os.path.exists(bfile) and not nullb
+ createfunc = hunk.createfile
+ if reverse:
+ createfunc = hunk.rmfile
+ if not goodb and not gooda and not createfunc():
+ raise PatchError(_("unable to find %s or %s for patching") %
+ (afile, bfile))
+ if gooda and goodb:
+ fname = bfile
+ if afile in bfile:
+ fname = afile
+ elif gooda:
+ fname = afile
+ elif not nullb:
+ fname = bfile
+ if afile in bfile:
+ fname = afile
+ elif not nulla:
+ fname = afile
+ return fname
+
+class linereader:
+ # simple class to allow pushing lines back into the input stream
+ def __init__(self, fp):
+ self.fp = fp
+ self.buf = []
+
+ def push(self, line):
+ self.buf.append(line)
- line = pf.readline()
- while line:
- tmpfp.write(line)
- line = pf.readline()
- except:
- tmpfp.close()
- os.unlink(patchname)
- raise
+ def readline(self):
+ if self.buf:
+ l = self.buf[0]
+ del self.buf[0]
+ return l
+ return self.fp.readline()
+
+def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
+ rejmerge=None, updatedir=None):
+ """reads a patch from fp and tries to apply it. The dict 'changed' is
+ filled in with all of the filenames changed by the patch. Returns 0
+ for a clean patch, -1 if any rejects were found and 1 if there was
+ any fuzz."""
+
+ def scangitpatch(fp, firstline, cwd=None):
+ '''git patches can modify a file, then copy that file to
+ a new file, but expect the source to be the unmodified form.
+ So we scan the patch looking for that case so we can do
+ the copies ahead of time.'''
- tmpfp.close()
- return patchname
+ pos = 0
+ try:
+ pos = fp.tell()
+ except IOError:
+ fp = cStringIO.StringIO(fp.read())
+
+ (dopatch, gitpatches) = readgitpatch(fp, firstline)
+ for gp in gitpatches:
+ if gp.copymod:
+ copyfile(gp.oldpath, gp.path, basedir=cwd)
+
+ fp.seek(pos)
-def patch(patchname, ui, strip=1, cwd=None, files={}):
- """apply the patch <patchname> to the working directory.
- a list of patched files is returned"""
+ return fp, dopatch, gitpatches
+
+ current_hunk = None
+ current_file = None
+ afile = ""
+ bfile = ""
+ state = None
+ hunknum = 0
+ rejects = 0
+
+ git = False
+ gitre = re.compile('diff --git (a/.*) (b/.*)')
- # helper function
- def __patch(patchname):
- """patch and updates the files and fuzz variables"""
- fuzz = False
-
- args = []
- patcher = ui.config('ui', 'patch')
- if not patcher:
- patcher = util.find_exe('gpatch') or util.find_exe('patch')
- # Try to be smart only if patch call was not supplied
- if util.needbinarypatch():
- args.append('--binary')
-
- if not patcher:
- raise util.Abort(_('no patch command found in hgrc or PATH'))
-
- if cwd:
- args.append('-d %s' % util.shellquote(cwd))
- fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
- util.shellquote(patchname)))
+ # our states
+ BFILE = 1
+ err = 0
+ context = None
+ lr = linereader(fp)
+ dopatch = True
+ gitworkdone = False
- for line in fp:
- line = line.rstrip()
- ui.note(line + '\n')
- if line.startswith('patching file '):
- pf = util.parse_patch_output(line)
- printed_file = False
- files.setdefault(pf, (None, None))
- elif line.find('with fuzz') >= 0:
- fuzz = True
- if not printed_file:
- ui.warn(pf + '\n')
- printed_file = True
- ui.warn(line + '\n')
- elif line.find('saving rejects to file') >= 0:
- ui.warn(line + '\n')
- elif line.find('FAILED') >= 0:
- if not printed_file:
- ui.warn(pf + '\n')
- printed_file = True
- ui.warn(line + '\n')
- code = fp.close()
- if code:
- raise util.Abort(_("patch command failed: %s") %
- util.explain_exit(code)[0])
- return fuzz
+ while True:
+ newfile = False
+ x = lr.readline()
+ if not x:
+ break
+ if current_hunk:
+ if x.startswith('\ '):
+ current_hunk.fix_newline()
+ ret = current_file.apply(current_hunk, reverse)
+ if ret >= 0:
+ changed.setdefault(current_file.fname, (None, None))
+ if ret > 0:
+ err = 1
+ current_hunk = None
+ gitworkdone = False
+ if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
+ ((context or context == None) and x.startswith('***************')))):
+ try:
+ if context == None and x.startswith('***************'):
+ context = True
+ current_hunk = hunk(x, hunknum + 1, lr, context)
+ except PatchError, inst:
+ ui.debug(str(inst))
+ current_hunk = None
+ continue
+ hunknum += 1
+ if not current_file:
+ if sourcefile:
+ current_file = patchfile(ui, sourcefile)
+ else:
+ current_file = selectfile(afile, bfile, current_hunk,
+ strip, reverse)
+ current_file = patchfile(ui, current_file)
+ elif state == BFILE and x.startswith('GIT binary patch'):
+ current_hunk = binhunk(changed[bfile[2:]][1])
+ if not current_file:
+ if sourcefile:
+ current_file = patchfile(ui, sourcefile)
+ else:
+ current_file = selectfile(afile, bfile, current_hunk,
+ strip, reverse)
+ current_file = patchfile(ui, current_file)
+ hunknum += 1
+ current_hunk.extract(fp)
+ elif x.startswith('diff --git'):
+ # check for git diff, scanning the whole patch file if needed
+ m = gitre.match(x)
+ if m:
+ afile, bfile = m.group(1, 2)
+ if not git:
+ git = True
+ fp, dopatch, gitpatches = scangitpatch(fp, x)
+ for gp in gitpatches:
+ changed[gp.path] = (gp.op, gp)
+ # else error?
+ # copy/rename + modify should modify target, not source
+ if changed.get(bfile[2:], (None, None))[0] in ('COPY',
+ 'RENAME'):
+ afile = bfile
+ gitworkdone = True
+ newfile = True
+ elif x.startswith('---'):
+ # check for a unified diff
+ l2 = lr.readline()
+ if not l2.startswith('+++'):
+ lr.push(l2)
+ continue
+ newfile = True
+ context = False
+ afile = parsefilename(x)
+ bfile = parsefilename(l2)
+ elif x.startswith('***'):
+ # check for a context diff
+ l2 = lr.readline()
+ if not l2.startswith('---'):
+ lr.push(l2)
+ continue
+ l3 = lr.readline()
+ lr.push(l3)
+ if not l3.startswith("***************"):
+ lr.push(l2)
+ continue
+ newfile = True
+ context = True
+ afile = parsefilename(x)
+ bfile = parsefilename(l2)
- (dopatch, gitpatches) = readgitpatch(patchname)
- for gp in gitpatches:
- files[gp.path] = (gp.op, gp)
-
- fuzz = False
- if dopatch:
- filterpatch = dopatch & (GP_FILTER | GP_BINARY)
- if filterpatch:
- patchname = dogitpatch(patchname, gitpatches, cwd=cwd)
- try:
- if dopatch & GP_PATCH:
- fuzz = __patch(patchname)
- finally:
- if filterpatch:
- os.unlink(patchname)
-
- return fuzz
+ if newfile:
+ if current_file:
+ current_file.close()
+ if rejmerge:
+ rejmerge(current_file)
+ rejects += len(current_file.rej)
+ state = BFILE
+ current_file = None
+ hunknum = 0
+ if current_hunk:
+ if current_hunk.complete():
+ ret = current_file.apply(current_hunk, reverse)
+ if ret >= 0:
+ changed.setdefault(current_file.fname, (None, None))
+ if ret > 0:
+ err = 1
+ else:
+ fname = current_file and current_file.fname or None
+ raise PatchError(_("malformed patch %s %s") % (fname,
+ current_hunk.desc))
+ if current_file:
+ current_file.close()
+ if rejmerge:
+ rejmerge(current_file)
+ rejects += len(current_file.rej)
+ if updatedir and git:
+ updatedir(gitpatches)
+ if rejects:
+ return -1
+ if hunknum == 0 and dopatch and not gitworkdone:
+ raise NoHunks
+ return err
def diffopts(ui, opts={}, untrusted=False):
def get(key, name=None):
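
Among the many helpers added to patch.py, selectfile's nested pathstrip implements the usual -p<N> strip semantics: drop N leading path components and treat runs of slashes as a single separator. A stand-alone version of that helper:

    def pathstrip(path, count=1):
        # strip `count` leading components from a patch file name
        if count == 0:
            return path.rstrip()
        i = 0
        while count > 0:
            i = path.find('/', i)
            if i == -1:
                raise ValueError('unable to strip away %d dirs from %s'
                                 % (count, path))
            i += 1
            # consume repeated '/' so 'a//b' strips the same way as 'a/b'
            while i < len(path) - 1 and path[i] == '/':
                i += 1
            count -= 1
        return path[i:].rstrip()

    assert pathstrip('a/b/c.txt', 1) == 'b/c.txt'
    assert pathstrip('a//b/c.txt', 1) == 'b/c.txt'
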
--- a/mercurial/statichttprepo.py Sat Jul 21 17:36:45 2007 +0200
+++ b/mercurial/statichttprepo.py Sat Jul 21 17:37:39 2007 +0200
@@ -75,10 +75,4 @@
def instance(ui, path, create):
if create:
raise util.Abort(_('cannot create new static-http repository'))
- if path.startswith('old-http:'):
- ui.warn(_("old-http:// syntax is deprecated, "
- "please use static-http:// instead\n"))
- path = path[4:]
- else:
- path = path[7:]
- return statichttprepository(ui, path)
+ return statichttprepository(ui, path[7:])
--- a/mercurial/util.py Sat Jul 21 17:36:45 2007 +0200
+++ b/mercurial/util.py Sat Jul 21 17:37:39 2007 +0200
@@ -63,7 +63,7 @@
Convert a string from the local character encoding to UTF-8
We attempt to decode strings using the encoding mode set by
- HG_ENCODINGMODE, which defaults to 'strict'. In this mode, unknown
+ HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
characters will cause an error message. Other modes include
'replace', which replaces unknown characters with a special
Unicode character, and 'ignore', which drops the character.
@@ -616,7 +616,7 @@
"""forcibly rename a file"""
try:
os.rename(src, dst)
- except OSError, err:
+ except OSError, err: # FIXME: check err (EEXIST ?)
# on windows, rename to existing file is not allowed, so we
# must delete destination first. but if file is open, unlink
# schedules it for delete but does not delete it. rename
@@ -1303,7 +1303,11 @@
os.makedirs(dirname)
if self._can_symlink:
- os.symlink(src, linkname)
+ try:
+ os.symlink(src, linkname)
+ except OSError, err:
+ raise OSError(err.errno, _('could not symlink to %r: %s') %
+ (src, err.strerror), linkname)
else:
f = self(self, dst, "w")
f.write(src)
--- a/mercurial/util_win32.py Sat Jul 21 17:36:45 2007 +0200
+++ b/mercurial/util_win32.py Sat Jul 21 17:37:39 2007 +0200
@@ -209,9 +209,9 @@
def __init__(self, name, mode='rb'):
access = 0
- if 'r' in mode or '+' in mode:
+ if 'r' in mode:
access |= win32file.GENERIC_READ
- if 'w' in mode or 'a' in mode:
+ if 'w' in mode or 'a' in mode or '+' in mode:
access |= win32file.GENERIC_WRITE
if 'r' in mode:
creation = win32file.OPEN_EXISTING
--- a/setup.py Sat Jul 21 17:36:45 2007 +0200
+++ b/setup.py Sat Jul 21 17:37:39 2007 +0200
@@ -2,8 +2,8 @@
#
# This is the mercurial setup script.
#
-# './setup.py install', or
-# './setup.py --help' for more options
+# 'python setup.py install', or
+# 'python setup.py --help' for more options
import sys
if not hasattr(sys, 'version_info') or sys.version_info < (2, 3, 0, 'final'):
@@ -64,7 +64,8 @@
packages=['mercurial', 'mercurial.hgweb', 'hgext', 'hgext.convert'],
ext_modules=[Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
Extension('mercurial.bdiff', ['mercurial/bdiff.c']),
- Extension('mercurial.base85', ['mercurial/base85.c'])],
+ Extension('mercurial.base85', ['mercurial/base85.c']),
+ Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c'])],
data_files=[(os.path.join('mercurial', root),
[os.path.join(root, file_) for file_ in files])
for root, dirs, files in os.walk('templates')],
--- a/templates/gitweb/map Sat Jul 21 17:36:45 2007 +0200
+++ b/templates/gitweb/map Sat Jul 21 17:37:39 2007 +0200
@@ -5,6 +5,7 @@
changelog = changelog.tmpl
summary = summary.tmpl
error = error.tmpl
+notfound = notfound.tmpl
naventry = '<a href="{url}log/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
navshortentry = '<a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
filenaventry = '<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{label|escape}</a> '
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/gitweb/notfound.tmpl Sat Jul 21 17:37:39 2007 +0200
@@ -0,0 +1,19 @@
+{header}
+<title>Mercurial repositories index</title>
+</head>
+
+<body>
+
+<div class="page_header">
+<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div
+ style="float:right;">Mercurial</div></a> Not found: {repo|escape}
+</div>
+
+<div class="page_body">
+The specified repository "{repo|escape}" is unknown, sorry.
+<br/>
+<br/>
+Please go back to the <a href="/">main repository list page</a>.
+</div>
+
+{footer}
--- a/tests/coverage.py Sat Jul 21 17:36:45 2007 +0200
+++ b/tests/coverage.py Sat Jul 21 17:37:39 2007 +0200
@@ -504,7 +504,7 @@
def get_suite_spots(self, tree, spots):
import symbol, token
for i in range(1, len(tree)):
- if type(tree[i]) == type(()):
+ if isinstance(tree[i], tuple):
if tree[i][0] == symbol.suite:
# Found a suite, look back for the colon and keyword.
lineno_colon = lineno_word = None
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-alias Sat Jul 21 17:37:39 2007 +0200
@@ -0,0 +1,32 @@
+#!/bin/sh
+
+cat > $HGRCPATH <<EOF
+[extensions]
+alias=
+
+[alias]
+myinit = init
+cleanstatus = status -c
+unknown = bargle
+ambiguous = s
+recursive = recursive
+EOF
+
+echo '% basic'
+hg myinit alias
+
+echo '% unknown'
+hg unknown
+
+echo '% ambiguous'
+hg ambiguous
+
+echo '% recursive'
+hg recursive
+
+cd alias
+echo foo > foo
+hg ci -Amfoo
+
+echo '% with opts'
+hg cleanst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-alias.out Sat Jul 21 17:37:39 2007 +0200
@@ -0,0 +1,10 @@
+% basic
+% unknown
+*** [alias] unknown: command bargle is unknown
+% ambiguous
+*** [alias] ambiguous: command s is ambiguous
+% recursive
+*** [alias] recursive: circular dependency on recursive
+adding foo
+% with opts
+C foo
--- a/tests/test-annotate Sat Jul 21 17:36:45 2007 +0200
+++ b/tests/test-annotate Sat Jul 21 17:37:39 2007 +0200
@@ -12,18 +12,27 @@
echo % annotate -c
hg annotate -c a
+echo % annotate -cl
+hg annotate -cl a
+
echo % annotate -d
hg annotate -d a
echo % annotate -n
hg annotate -n a
+echo % annotate -nl
+hg annotate -nl a
+
echo % annotate -u
hg annotate -u a
echo % annotate -cdnu
hg annotate -cdnu a
+echo % annotate -cdnul
+hg annotate -cdnul a
+
cat <<EOF >>a
a
a
@@ -32,28 +41,34 @@
hg cp a b
hg ci -mb -d '1 0'
cat <<EOF >> b
-b
-b
-b
+b4
+b5
+b6
EOF
hg ci -mb2 -d '2 0'
-echo % annotate b
-hg annotate b
+echo % annotate -n b
+hg annotate -n b
+echo % annotate -nl b
+hg annotate -nl b
echo % annotate -nf b
hg annotate -nf b
+echo % annotate -nlf b
+hg annotate -nlf b
hg up -C 2
cat <<EOF >> b
-b
+b4
c
-b
+b5
EOF
hg ci -mb2.1 -d '2 0'
hg merge
hg ci -mmergeb -d '3 0'
echo % annotate after merge
hg annotate -nf b
+echo % annotate after merge with -l
+hg annotate -nlf b
hg up -C 1
hg cp a b
@@ -65,17 +80,21 @@
hg ci -mc -d '3 0'
hg merge
cat <<EOF >> b
-b
+b4
c
-b
+b5
EOF
echo d >> b
hg ci -mmerge2 -d '4 0'
echo % annotate after rename merge
hg annotate -nf b
+echo % annotate after rename merge with -l
+hg annotate -nlf b
echo % linkrev vs rev
-hg annotate -r tip a
+hg annotate -r tip -n a
+echo % linkrev vs rev with -l
+hg annotate -r tip -nl a
# test issue 589
# annotate was crashing when trying to --follow something
--- a/tests/test-annotate.out Sat Jul 21 17:36:45 2007 +0200
+++ b/tests/test-annotate.out Sat Jul 21 17:37:39 2007 +0200
@@ -3,28 +3,48 @@
adding a
% annotate -c
8435f90966e4: a
+% annotate -cl
+8435f90966e4:1: a
% annotate -d
Thu Jan 01 00:00:01 1970 +0000: a
% annotate -n
0: a
+% annotate -nl
+0:1: a
% annotate -u
nobody: a
% annotate -cdnu
nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000: a
-% annotate b
+% annotate -cdnul
+nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000:1: a
+% annotate -n b
2: a
2: a
2: a
-3: b
-3: b
-3: b
+3: b4
+3: b5
+3: b6
+% annotate -nl b
+2:1: a
+2:2: a
+2:3: a
+3:4: b4
+3:5: b5
+3:6: b6
% annotate -nf b
0 a: a
1 a: a
1 a: a
-3 b: b
-3 b: b
-3 b: b
+3 b: b4
+3 b: b5
+3 b: b6
+% annotate -nlf b
+0 a:1: a
+1 a:2: a
+1 a:3: a
+3 b:4: b4
+3 b:5: b5
+3 b:6: b6
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
merging b
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -33,9 +53,16 @@
0 a: a
1 a: a
1 a: a
-3 b: b
+3 b: b4
4 b: c
-3 b: b
+3 b: b5
+% annotate after merge with -l
+0 a:1: a
+1 a:2: a
+1 a:3: a
+3 b:4: b4
+4 b:5: c
+3 b:5: b5
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
merging b
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -44,14 +71,26 @@
0 a: a
6 b: z
1 a: a
-3 b: b
+3 b: b4
4 b: c
-3 b: b
+3 b: b5
7 b: d
+% annotate after rename merge with -l
+0 a:1: a
+6 b:2: z
+1 a:3: a
+3 b:4: b4
+4 b:5: c
+3 b:5: b5
+7 b:7: d
% linkrev vs rev
0: a
1: a
1: a
+% linkrev vs rev with -l
+0:1: a
+1:2: a
+1:3: a
% generate ABA rename configuration
% annotate after ABA with follow
foo: foo
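The annotate tests now also exercise the -l/--line-number flag, which appends the line number the text had in the revision that introduced it, after the existing revision/changeset/user/date columns (compare "0: a" with "0:1: a" in the output above). A minimal sketch on a two-revision file:

  #!/bin/sh
  hg init demo && cd demo
  echo a > a
  hg ci -Ama -d '0 0'
  echo b >> a
  hg ci -mb -d '1 0'
  hg annotate -n a       # revision number per line
  hg annotate -nl a      # revision number plus originating line number
  hg annotate -cdnul a   # user, revision, changeset, date and line number combined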
--- a/tests/test-archive Sat Jul 21 17:36:45 2007 +0200
+++ b/tests/test-archive Sat Jul 21 17:37:39 2007 +0200
@@ -63,7 +63,14 @@
unzip -t test.zip
hg archive -t tar - | tar tf - | sed "s/$QTIP/TIP/"
+
hg archive -r 0 -t tar rev-%r.tar
if [ -f rev-0.tar ]; then
echo 'rev-0.tar created'
fi
+
+echo '% empty repo'
+hg init ../empty
+cd ../empty
+hg archive ../test-empty
+exit 0
--- a/tests/test-archive.out Sat Jul 21 17:36:45 2007 +0200
+++ b/tests/test-archive.out Sat Jul 21 17:37:39 2007 +0200
@@ -39,3 +39,5 @@
test-TIP/baz/bletch
test-TIP/foo
rev-0.tar created
+% empty repo
+abort: repository has no revisions
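The added archive check makes sure that archiving an empty repository aborts with a clear "repository has no revisions" message; the trailing exit 0 keeps that expected abort from being treated as a test failure. A minimal reproduction sketch (the target directory name is illustrative):

  #!/bin/sh
  hg init empty
  cd empty
  hg archive ../empty-export   # aborts: repository has no revisions
  exit 0                       # the abort is the expected outcome here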
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-children Sat Jul 21 17:37:39 2007 +0200
@@ -0,0 +1,59 @@
+#!/bin/sh
+# test children command
+
+cat <<EOF >> $HGRCPATH
+[extensions]
+hgext.children=
+EOF
+
+echo "% init"
+hg init t
+cd t
+
+echo "% no working directory"
+hg children
+
+echo % setup
+echo 0 > file0
+hg ci -qAm 0 -d '0 0'
+
+echo 1 > file1
+hg ci -qAm 1 -d '1 0'
+
+echo 2 >> file0
+hg ci -qAm 2 -d '2 0'
+
+hg co null
+echo 3 > file3
+hg ci -qAm 3 -d '3 0'
+
+echo "% hg children at revision 3 (tip)"
+hg children
+
+hg co null
+echo "% hg children at nullrev (should be 0 and 3)"
+hg children
+
+hg co 1
+echo "% hg children at revision 1 (should be 2)"
+hg children
+
+hg co 2
+echo "% hg children at revision 2 (other head)"
+hg children
+
+for i in null 0 1 2 3; do
+ echo "% hg children -r $i"
+ hg children -r $i
+done
+
+echo "% hg children -r 0 file0 (should be 2)"
+hg children -r 0 file0
+
+echo "% hg children -r 1 file0 (should be 2)"
+hg children -r 1 file0
+
+hg co 0
+echo "% hg children file0 at revision 0 (should be 2)"
+hg children file0
+
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-children.out Sat Jul 21 17:37:39 2007 +0200
@@ -0,0 +1,62 @@
+% init
+% no working directory
+% setup
+0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+% hg children at revision 3 (tip)
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+% hg children at nullrev (should be 0 and 3)
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% hg children at revision 1 (should be 2)
+changeset: 2:8f5eea5023c2
+user: test
+date: Thu Jan 01 00:00:02 1970 +0000
+summary: 2
+
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% hg children at revision 2 (other head)
+% hg children -r null
+changeset: 0:4df8521a7374
+user: test
+date: Thu Jan 01 00:00:00 1970 +0000
+summary: 0
+
+changeset: 3:e2962852269d
+tag: tip
+parent: -1:000000000000
+user: test
+date: Thu Jan 01 00:00:03 1970 +0000
+summary: 3
+
+% hg children -r 0
+changeset: 1:708c093edef0
+user: test
+date: Thu Jan 01 00:00:01 1970 +0000
+summary: 1
+
+% hg children -r 1
+changeset: 2:8f5eea5023c2
+user: test
+date: Thu Jan 01 00:00:02 1970 +0000
+summary: 2
+
+% hg children -r 2
+% hg children -r 3
+% hg children -r 0 file0 (should be 2)
+changeset: 2:8f5eea5023c2
+user: test
+date: Thu Jan 01 00:00:02 1970 +0000
+summary: 2
+
+% hg children -r 1 file0 (should be 2)
+changeset: 2:8f5eea5023c2
+user: test
+date: Thu Jan 01 00:00:02 1970 +0000
+summary: 2
+
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+% hg children file0 at revision 0 (should be 2)
+changeset: 2:8f5eea5023c2
+user: test
+date: Thu Jan 01 00:00:02 1970 +0000
+summary: 2
+
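The children test drives the hgext.children extension: with no arguments it lists the children of the working directory's parent, -r REV asks about another revision (including null), and a file argument narrows the question to that file's history, as the expected output above illustrates. A minimal sketch with a short linear history (revision numbers match the commits made here):

  #!/bin/sh
  cat >> $HGRCPATH <<EOF
  [extensions]
  hgext.children=
  EOF

  hg init t && cd t
  echo 0 > file0 && hg ci -qAm 0 -d '0 0'
  echo 1 > file1 && hg ci -qAm 1 -d '1 0'
  echo 2 >> file0 && hg ci -qAm 2 -d '2 0'

  hg co 1                  # move the working directory back to revision 1
  hg children              # child of revision 1, i.e. revision 2
  hg children -r 0         # child of an explicit revision, i.e. revision 1
  hg children -r 0 file0   # the test above expects revision 2, the next change to file0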
--- a/tests/test-debugcomplete.out Sat Jul 21 17:36:45 2007 +0200
+++ b/tests/test-debugcomplete.out Sat Jul 21 17:37:39 2007 +0200
@@ -110,6 +110,7 @@
% Show the options for the "serve" command
--accesslog
--address
+--certificate
--config
--cwd
--daemon
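The completion output picks up the new --certificate option of hg serve, which makes the built-in web server speak HTTPS using the given SSL certificate file. A minimal sketch (server.pem is an assumed, pre-existing certificate; the port is illustrative):

  #!/bin/sh
  # serve the current repository over HTTPS
  hg serve --port 8443 --certificate server.pem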
--- a/tests/test-import Sat Jul 21 17:36:45 2007 +0200
+++ b/tests/test-import Sat Jul 21 17:37:39 2007 +0200
@@ -93,6 +93,24 @@
hg --cwd b tip | grep second
rm -r b
+# subject: duplicate detection, removal of [PATCH]
+cat > mkmsg2.py <<EOF
+import email.Message, sys
+msg = email.Message.Message()
+msg.set_payload('email patch\n\nnext line\n' + open('tip.patch').read())
+msg['Subject'] = '[PATCH] email patch'
+msg['From'] = 'email patcher'
+sys.stdout.write(msg.as_string())
+EOF
+
+echo '% plain diff in email, [PATCH] subject, message body with subject'
+hg clone -r0 a b
+hg --cwd a diff -r0:1 > tip.patch
+python mkmsg2.py | hg --cwd b import -
+hg --cwd b tip --template '{desc}\n'
+rm -r b
+
+
# non-regression test for a bug:
# importing a patch in a subdirectory failed at the commit stage
echo line 2 >> a/d1/d2/a
--- a/tests/test-import.out Sat Jul 21 17:36:45 2007 +0200
+++ b/tests/test-import.out Sat Jul 21 17:37:39 2007 +0200
@@ -100,6 +100,17 @@
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
applying patch from stdin
summary: second change
+% plain diff in email, [PATCH] subject, message body with subject
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 2 changes to 2 files
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+applying patch from stdin
+email patch
+
+next line
% hg import in a subdirectory
requesting all changes
adding changesets
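The new import test feeds hg import - an email whose Subject carries a [PATCH] tag and whose body repeats that subject before the diff; the importer should strip the tag and avoid duplicating the subject, so the committed description is just "email patch" followed by "next line", as shown above. A minimal sketch of the same round trip, reusing the mkmsg2.py helper defined in the test:

  #!/bin/sh
  hg clone -r0 a b                          # fresh clone to import into
  hg --cwd a diff -r0:1 > tip.patch         # plain diff that mkmsg2.py wraps in an email
  python mkmsg2.py | hg --cwd b import -    # import the mail from stdin
  hg --cwd b tip --template '{desc}\n'      # description without the [PATCH] tag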
--- a/tests/test-mq Sat Jul 21 17:36:45 2007 +0200
+++ b/tests/test-mq Sat Jul 21 17:37:39 2007 +0200
@@ -370,10 +370,17 @@
echo foo > foo
hg add foo
hg ci -m 'add foo'
-hg qinit -c
+hg qinit
hg qnew patch1
echo bar >> foo
hg qrefresh -m 'change foo'
+cd ..
+
+# repo with unversioned patch dir
+hg qclone qclonesource failure
+
+cd qclonesource
+hg qinit -c
hg qci -m checkpoint
qlog
cd ..
--- a/tests/test-mq.out Sat Jul 21 17:36:45 2007 +0200
+++ b/tests/test-mq.out Sat Jul 21 17:37:39 2007 +0200
@@ -262,7 +262,8 @@
Patch queue now empty
applying foo
applying bar
-1 out of 1 hunk ignored -- saving rejects to file foo.rej
+file foo already exists
+1 out of 1 hunk FAILED -- saving rejects to file foo.rej
patch failed, unable to continue (try -v)
patch failed, rejects left in working dir
Errors during apply, please fix and refresh bar
@@ -409,6 +410,8 @@
summary: add foo
% qclone
+abort: versioned patch repository not found (see qinit -c)
+adding .hg/patches/patch1
main repo:
rev 1: change foo
rev 0: add foo
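The mq changes check that hg qclone refuses a source whose patch directory is not itself versioned, and that creating the queue repository afterwards with hg qinit -c picks up the existing patches (the "adding .hg/patches/patch1" line above). A minimal sketch of that sequence (repository names are illustrative):

  #!/bin/sh
  hg init source && cd source
  echo foo > foo
  hg ci -Amfoo
  hg qinit                     # plain, unversioned patch queue
  hg qnew patch1
  cd ..

  hg qclone source copy        # aborts: versioned patch repository not found (see qinit -c)

  cd source
  hg qinit -c                  # turn .hg/patches into a repository, adding the existing patches
  hg qci -m checkpoint         # commit the queue
  cd ..
  hg qclone source copy2       # with a versioned queue, qclone can copy repo and patches together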
--- a/tests/test-transplant.out Sat Jul 21 17:36:45 2007 +0200
+++ b/tests/test-transplant.out Sat Jul 21 17:37:39 2007 +0200
@@ -101,17 +101,17 @@
adding bar
2 files updated, 0 files merged, 2 files removed, 0 files unresolved
applying a1e30dd1b8e7
-foo
-Hunk #1 FAILED at 1.
+patching file foo
+Hunk #1 FAILED at 0
1 out of 1 hunk FAILED -- saving rejects to file foo.rej
-patch command failed: exited with status 1
+patch failed to apply
abort: Fix up the merge and run hg transplant --continue
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
applying a1e30dd1b8e7
-foo
-Hunk #1 FAILED at 1.
+patching file foo
+Hunk #1 FAILED at 0
1 out of 1 hunk FAILED -- saving rejects to file foo.rej
-patch command failed: exited with status 1
+patch failed to apply
abort: Fix up the merge and run hg transplant --continue
a1e30dd1b8e7 transplanted as f1563cf27039
skipping already applied revision 1:a1e30dd1b8e7
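The transplant output changes because failed patches are now applied and reported by Mercurial's internal patch code ("patching file foo", "patch failed to apply") rather than by the exit status of an external patch command. When a transplanted changeset fails like this, the messages point at the usual recovery loop, sketched here with an illustrative source path and revision:

  #!/bin/sh
  hg transplant -s ../source REV   # on failure, rejects are saved to foo.rej
  # fix up the file by hand using foo.rej, then resume
  hg transplant --continue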