--- a/.hgignore Wed Aug 08 22:47:30 2007 +0200
+++ b/.hgignore Wed Aug 08 23:00:01 2007 +0200
@@ -4,6 +4,7 @@
*.orig
*.rej
*~
+*.mergebackup
*.o
*.so
*.pyc
--- a/contrib/buildrpm Wed Aug 08 22:47:30 2007 +0200
+++ b/contrib/buildrpm Wed Aug 08 23:00:01 2007 +0200
@@ -29,7 +29,7 @@
version=`hg tags | perl -e 'while(<STDIN>){if(/^(\d\S+)/){print$1;exit}}'`
# Compute the release number as the difference in revision numbers
# between the tip and the most recent tag.
-release=`hg tags | perl -e 'while(<STDIN>){/^(\S+)\s+(\d+)/;if($1eq"tip"){$t=$2}else{print$t-$2+1;exit}}'`
+release=`hg tags | perl -e 'while(<STDIN>){($tag,$id)=/^(\S+)\s+(\d+)/;if($tag eq "tip"){$tip = $id}elsif($tag=~/^\d/){print $tip-$id+1;exit}}'`
tip=`hg -q tip`
# Beat up the spec file
@@ -40,6 +40,19 @@
-e 's,^%setup.*,,' \
$specfile > $tmpspec
+cat <<EOF >> $tmpspec
+%changelog
+* `date +'%a %b %d %Y'` `hg showconfig ui.username` $version-$release
+- Automatically built via $0
+
+EOF
+hg log \
+ --template '* {date|rfc822date} {author}\n- {desc|firstline}\n\n' \
+ .hgtags \
+ | sed -e 's/^\(\* [MTWFS][a-z][a-z]\), \([0-3][0-9]\) \([A-Z][a-z][a-z]\) /\1 \3 \2 /' \
+ -e '/^\* [MTWFS][a-z][a-z] /{s/ [012][0-9]:[0-9][0-9]:[0-9][0-9] [+-][0-9]\{4\}//}' \
+ >> $tmpspec
+
rpmbuild --define "_topdir $rpmdir" -bb $tmpspec
if [ $? = 0 ]; then
rm -rf $tmpspec $rpmdir/BUILD
--- a/contrib/churn.py Wed Aug 08 22:47:30 2007 +0200
+++ b/contrib/churn.py Wed Aug 08 23:00:01 2007 +0200
@@ -11,9 +11,34 @@
#
# <alias email> <actual email>
-import sys
from mercurial.i18n import gettext as _
from mercurial import hg, mdiff, cmdutil, ui, util, templater, node
+import os, sys
+
+def get_tty_width():
+ if 'COLUMNS' in os.environ:
+ try:
+ return int(os.environ['COLUMNS'])
+ except ValueError:
+ pass
+ try:
+ import termios, fcntl, struct
+ buf = 'abcd'
+ for dev in (sys.stdout, sys.stdin):
+ try:
+ if buf != 'abcd':
+ break
+ fd = dev.fileno()
+ if not os.isatty(fd):
+ continue
+ buf = fcntl.ioctl(fd, termios.TIOCGWINSZ, buf)
+ except ValueError:
+ pass
+ if buf != 'abcd':
+ return struct.unpack('hh', buf)[1]
+ except ImportError:
+ pass
+ return 80
def __gather(ui, repo, node1, node2):
def dirtywork(f, mmap1, mmap2):
@@ -159,8 +184,9 @@
maximum = ordered[0][1]
- ui.note("Assuming 80 character terminal\n")
- width = 80 - 1
+ width = get_tty_width()
+ ui.note(_("assuming %i character terminal\n") % width)
+ width -= 1
for i in ordered:
person = i[0]
--- a/contrib/hgk Wed Aug 08 22:47:30 2007 +0200
+++ b/contrib/hgk Wed Aug 08 23:00:01 2007 +0200
@@ -5,6 +5,74 @@
# and distributed under the terms of the GNU General Public Licence,
# either version 2, or (at your option) any later version.
+
+# Modified version of Tip 171:
+# http://www.tcl.tk/cgi-bin/tct/tip/171.html
+#
+# The in_mousewheel global was added to fix strange reentrancy issues.
+# The whole snippet is activated only under Windows; mouse wheel
+# bindings already work under Mac OS X and Linux.
+
+if {[tk windowingsystem] eq "win32"} {
+
+set mw_classes [list Text Listbox Table TreeCtrl]
+ foreach class $mw_classes { bind $class <MouseWheel> {} }
+
+set in_mousewheel 0
+
+proc ::tk::MouseWheel {wFired X Y D {shifted 0}} {
+ global in_mousewheel
+ if { $in_mousewheel != 0 } { return }
+ # Set event to check based on call
+ set evt "<[expr {$shifted?{Shift-}:{}}]MouseWheel>"
+ # do not double-fire in case the class already has a binding
+ if {[bind [winfo class $wFired] $evt] ne ""} { return }
+ # obtain the window the mouse is over
+ set w [winfo containing $X $Y]
+ # if we are outside the app, try and scroll the focus widget
+ if {![winfo exists $w]} { catch {set w [focus]} }
+ if {[winfo exists $w]} {
+
+ if {[bind $w $evt] ne ""} {
+ # Awkward ... this widget has a MouseWheel binding, but to
+ # trigger successfully in it, we must give it focus.
+ catch {focus} old
+ if {$w ne $old} { focus $w }
+ set in_mousewheel 1
+ event generate $w $evt -rootx $X -rooty $Y -delta $D
+ set in_mousewheel 0
+ if {$w ne $old} { focus $old }
+ return
+ }
+
+ # aqua and x11/win32 have different delta handling
+ if {[tk windowingsystem] ne "aqua"} {
+ set delta [expr {- ($D / 30)}]
+ } else {
+ set delta [expr {- ($D)}]
+ }
+ # scrollbars have different call conventions
+ if {[string match "*Scrollbar" [winfo class $w]]} {
+ catch {tk::ScrollByUnits $w \
+ [string index [$w cget -orient] 0] $delta}
+ } else {
+ set cmd [list $w [expr {$shifted ? "xview" : "yview"}] \
+ scroll $delta units]
+ # Walking up to find the proper widget (handles cases like
+ # embedded widgets in a canvas)
+ while {[catch $cmd] && [winfo toplevel $w] ne $w} {
+ set w [winfo parent $w]
+ }
+ }
+ }
+}
+
+bind all <MouseWheel> [list ::tk::MouseWheel %W %X %Y %D 0]
+
+# end of win32 section
+}
+
+
proc gitdir {} {
global env
if {[info exists env(GIT_DIR)]} {
@@ -299,6 +367,11 @@
}
}
+proc allcansmousewheel {delta} {
+ set delta [expr -5*(int($delta)/abs($delta))]
+ allcanvs yview scroll $delta units
+}
+
proc error_popup msg {
set w .error
toplevel $w
@@ -470,6 +543,7 @@
bindall <1> {selcanvline %W %x %y}
#bindall <B1-Motion> {selcanvline %W %x %y}
+ bindall <MouseWheel> "allcansmousewheel %D"
bindall <ButtonRelease-4> "allcanvs yview scroll -5 units"
bindall <ButtonRelease-5> "allcanvs yview scroll 5 units"
bindall <2> "allcanvs scan mark 0 %y"
--- a/contrib/macosx/Readme.html Wed Aug 08 22:47:30 2007 +0200
+++ b/contrib/macosx/Readme.html Wed Aug 08 23:00:01 2007 +0200
@@ -19,10 +19,14 @@
<p class="p2"><br></p>
<p class="p3">This is <i>not</i> a stand-alone version of Mercurial.</p>
<p class="p2"><br></p>
-<p class="p3">To use it, you must have the Universal MacPython 2.4.3 from <a href="http://www.python.org">www.python.org</a> installed.</p>
+<p class="p3">To use it, you must have the appropriate version of Universal MacPython from <a href="http://www.python.org">www.python.org</a> installed.</p>
<p class="p2"><br></p>
-<p class="p3">You can download MacPython 2.4.3 from here:</p>
-<p class="p4"><span class="s1"><a href="http://www.python.org/ftp/python/2.4.3/Universal-MacPython-2.4.3-2006-04-07.dmg">http://www.python.org/ftp/python/2.4.3/Universal-MacPython-2.4.3-2006-04-07.dmg</a></span></p>
+<p class="p3">You can find more information and download MacPython from here:</p>
+<p class="p4"><span class="s1"><a href="http://www.python.org/download">http://www.python.org/download</a></span></p>
+<p class="p2"><br></p>
+<p class="p3">Or direct links to the latest version are:</p>
+<p class="p4"><span class="s1"><a href="http://www.python.org/ftp/python/2.5.1/python-2.5.1-macosx.dmg">Python 2.5.1 for Macintosh OS X</a></span></p>
+<p class="p4"><span class="s1"><a href="http://www.python.org/ftp/python/2.4.4/python-2.4.4-macosx2006-10-18.dmg">Python 2.4.4 for Macintosh OS X</a></span></p>
<p class="p2"><br></p>
<p class="p1"><b>After you install</b></p>
<p class="p2"><br></p>
--- a/contrib/mercurial.el Wed Aug 08 22:47:30 2007 +0200
+++ b/contrib/mercurial.el Wed Aug 08 23:00:01 2007 +0200
@@ -1261,9 +1261,22 @@
(interactive)
(error "not implemented"))
-(defun hg-version-other-window ()
- (interactive)
- (error "not implemented"))
+(defun hg-version-other-window (rev)
+ "Visit version REV of the current file in another window.
+If the current file is named `F', the version is named `F.~REV~'.
+If `F.~REV~' already exists, use it instead of checking it out again."
+ (interactive "sVersion to visit (default is workfile version): ")
+ (let* ((file buffer-file-name)
+ (version (if (string-equal rev "")
+ "tip"
+ rev))
+ (automatic-backup (vc-version-backup-file-name file version))
+ (manual-backup (vc-version-backup-file-name file version 'manual)))
+ (unless (file-exists-p manual-backup)
+ (if (file-exists-p automatic-backup)
+ (rename-file automatic-backup manual-backup nil)
+ (hg-run0 "-q" "cat" "-r" version "-o" manual-backup file)))
+ (find-file-other-window manual-backup)))
(provide 'mercurial)
--- a/contrib/mercurial.spec Wed Aug 08 22:47:30 2007 +0200
+++ b/contrib/mercurial.spec Wed Aug 08 23:00:01 2007 +0200
@@ -8,6 +8,17 @@
URL: http://www.selenic.com/mercurial
BuildRoot: /tmp/build.%{name}-%{version}-%{release}
+# From the README:
+#
+# Note: some distributions fails to include bits of distutils by
+# default, you'll need python-dev to install. You'll also need a C
+# compiler and a 3-way merge tool like merge, tkdiff, or kdiff3.
+#
+# python-devel provides an adequate python-dev. The merge tool is a
+# run-time dependency.
+#
+BuildRequires: python >= 2.3, python-devel, make, gcc, asciidoc, xmlto
+
%define pythonver %(python -c 'import sys;print ".".join(map(str, sys.version_info[:2]))')
%define pythonlib %{_libdir}/python%{pythonver}/site-packages/%{name}
%define hgext %{_libdir}/python%{pythonver}/site-packages/hgext
@@ -21,23 +32,51 @@
%setup -q
%build
-python setup.py build
+make all
%install
-python setup.py install --root $RPM_BUILD_ROOT
+python setup.py install --root $RPM_BUILD_ROOT --prefix %{_prefix}
+make install-doc DESTDIR=$RPM_BUILD_ROOT MANDIR=%{_mandir}
+
+install contrib/hgk $RPM_BUILD_ROOT%{_bindir}
+install contrib/convert-repo $RPM_BUILD_ROOT%{_bindir}/mercurial-convert-repo
+install contrib/hg-ssh $RPM_BUILD_ROOT%{_bindir}
+install contrib/git-viz/{hg-viz,git-rev-tree} $RPM_BUILD_ROOT%{_bindir}
+
+bash_completion_dir=$RPM_BUILD_ROOT%{_sysconfdir}/bash_completion.d
+mkdir -p $bash_completion_dir
+install contrib/bash_completion $bash_completion_dir/mercurial.sh
+
+zsh_completion_dir=$RPM_BUILD_ROOT%{_datadir}/zsh/site-functions
+mkdir -p $zsh_completion_dir
+install contrib/zsh_completion $zsh_completion_dir/_mercurial
+
+lisp_dir=$RPM_BUILD_ROOT%{_datadir}/emacs/site-lisp
+mkdir -p $lisp_dir
+install contrib/mercurial.el $lisp_dir
%clean
rm -rf $RPM_BUILD_ROOT
%files
%defattr(-,root,root,-)
-%doc doc/* *.cgi
+%doc CONTRIBUTORS COPYING doc/README doc/hg*.txt doc/hg*.html doc/ja *.cgi
+%{_mandir}/man?/hg*.gz
%dir %{pythonlib}
%dir %{hgext}
+%{_sysconfdir}/bash_completion.d/mercurial.sh
+%{_datadir}/zsh/site-functions/_mercurial
+%{_datadir}/emacs/site-lisp/mercurial.el
+%{_bindir}/hg
+%{_bindir}/hgk
%{_bindir}/hgmerge
-%{_bindir}/hg
+%{_bindir}/hg-ssh
+%{_bindir}/hg-viz
+%{_bindir}/git-rev-tree
+%{_bindir}/mercurial-convert-repo
%{pythonlib}/templates
%{pythonlib}/*.py*
%{pythonlib}/hgweb/*.py*
%{pythonlib}/*.so
%{hgext}/*.py*
+%{hgext}/convert/*.py*
--- a/contrib/win32/mercurial.ini Wed Aug 08 22:47:30 2007 +0200
+++ b/contrib/win32/mercurial.ini Wed Aug 08 23:00:01 2007 +0200
@@ -1,41 +1,41 @@
-; System-wide Mercurial config file. To override these settings on a
-; per-user basis, please edit the following file instead, where
-; USERNAME is your Windows user name:
-; C:\Documents and Settings\USERNAME\Mercurial.ini
-
-[ui]
-editor = notepad
-
-; By default, we try to encode and decode all files that do not
-; contain ASCII NUL characters. What this means is that we try to set
-; line endings to Windows style on update, and to Unix style on
-; commit. This lets us cooperate with Linux and Unix users, so
-; everybody sees files with their native line endings.
-
-[extensions]
-; The win32text extension is available and installed by default. It
-; provides built-in Python hooks to perform line ending conversions.
-; This is normally much faster than running an external program.
-hgext.win32text =
-
-
-[encode]
-; Encode files that don't contain NUL characters.
-
-; ** = cleverencode:
-
-; Alternatively, you can explicitly specify each file extension that
-; you want encoded (any you omit will be left untouched), like this:
-
-; *.txt = dumbencode:
-
-
-[decode]
-; Decode files that don't contain NUL characters.
-
-; ** = cleverdecode:
-
-; Alternatively, you can explicitly specify each file extension that
-; you want decoded (any you omit will be left untouched), like this:
-
-; **.txt = dumbdecode:
+; System-wide Mercurial config file. To override these settings on a
+; per-user basis, please edit the following file instead, where
+; USERNAME is your Windows user name:
+; C:\Documents and Settings\USERNAME\Mercurial.ini
+
+[ui]
+editor = notepad
+
+; By default, we try to encode and decode all files that do not
+; contain ASCII NUL characters. What this means is that we try to set
+; line endings to Windows style on update, and to Unix style on
+; commit. This lets us cooperate with Linux and Unix users, so
+; everybody sees files with their native line endings.
+
+[extensions]
+; The win32text extension is available and installed by default. It
+; provides built-in Python hooks to perform line ending conversions.
+; This is normally much faster than running an external program.
+hgext.win32text =
+
+
+[encode]
+; Encode files that don't contain NUL characters.
+
+; ** = cleverencode:
+
+; Alternatively, you can explicitly specify each file extension that
+; you want encoded (any you omit will be left untouched), like this:
+
+; *.txt = dumbencode:
+
+
+[decode]
+; Decode files that don't contain NUL characters.
+
+; ** = cleverdecode:
+
+; Alternatively, you can explicitly specify each file extension that
+; you want decoded (any you omit will be left untouched), like this:
+
+; **.txt = dumbdecode:
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/alias.py Wed Aug 08 23:00:01 2007 +0200
@@ -0,0 +1,76 @@
+# Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
+# This file is published under the GNU GPL.
+
+'''allow user-defined command aliases
+
+To use, create entries in your hgrc of the form
+
+[alias]
+mycmd = cmd --args
+'''
+
+from mercurial.cmdutil import findcmd, UnknownCommand, AmbiguousCommand
+from mercurial import commands
+
+cmdtable = {}
+
+class RecursiveCommand(Exception): pass
+
+class lazycommand(object):
+ '''defer command lookup until needed, so that extensions loaded
+ after alias can be aliased'''
+ def __init__(self, ui, name, target):
+ self._ui = ui
+ self._name = name
+ self._target = target
+ self._cmd = None
+
+ def __len__(self):
+ self._resolve()
+ return len(self._cmd)
+
+ def __getitem__(self, key):
+ self._resolve()
+ return self._cmd[key]
+
+ def __iter__(self):
+ self._resolve()
+ return self._cmd.__iter__()
+
+ def _resolve(self):
+ if self._cmd is not None:
+ return
+
+ try:
+ self._cmd = findcmd(self._ui, self._target)[1]
+ if self._cmd == self:
+ raise RecursiveCommand()
+ if self._target in commands.norepo.split(' '):
+ commands.norepo += ' %s' % self._name
+ return
+ except UnknownCommand:
+ msg = '*** [alias] %s: command %s is unknown' % \
+ (self._name, self._target)
+ except AmbiguousCommand:
+ msg = '*** [alias] %s: command %s is ambiguous' % \
+ (self._name, self._target)
+ except RecursiveCommand:
+ msg = '*** [alias] %s: circular dependency on %s' % \
+ (self._name, self._target)
+ def nocmd(*args, **opts):
+ self._ui.warn(msg + '\n')
+ return 1
+ nocmd.__doc__ = msg
+ self._cmd = (nocmd, [], '')
+ commands.norepo += ' %s' % self._name
+
+def uisetup(ui):
+ for cmd, target in ui.configitems('alias'):
+ if not target:
+ ui.warn('*** [alias] %s: no definition\n' % cmd)
+ continue
+ args = target.split(' ')
+ tcmd = args.pop(0)
+ if args:
+ ui.setconfig('defaults', cmd, ' '.join(args))
+ cmdtable[cmd] = lazycommand(ui, cmd, tcmd)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/children.py Wed Aug 08 23:00:01 2007 +0200
@@ -0,0 +1,41 @@
+# Mercurial extension to provide the 'hg children' command
+#
+# Copyright 2007 by Intevation GmbH <intevation@intevation.de>
+# Author(s):
+# Thomas Arendsen Hein <thomas@intevation.de>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from mercurial import cmdutil
+from mercurial.i18n import _
+
+
+def children(ui, repo, file_=None, **opts):
+ """show the children of the given or working dir revision
+
+ Print the children of the working directory's revision.
+ If a revision is given via --rev, the children of that revision
+ will be printed. If a file argument is given, the revision
+ in which the file was last changed (after the working directory
+ revision or the argument to --rev if given) is printed.
+ """
+ rev = opts.get('rev')
+ if file_:
+ ctx = repo.filectx(file_, changeid=rev)
+ else:
+ ctx = repo.changectx(rev)
+
+ displayer = cmdutil.show_changeset(ui, repo, opts)
+ for node in [cp.node() for cp in ctx.children()]:
+ displayer.show(changenode=node)
+
+
+cmdtable = {
+ "children":
+ (children,
+ [('r', 'rev', '', _('show children of the specified rev')),
+ ('', 'style', '', _('display using template map file')),
+ ('', 'template', '', _('display with template'))],
+ _('hg children [-r REV] [FILE]')),
+}
--- a/hgext/convert/__init__.py Wed Aug 08 22:47:30 2007 +0200
+++ b/hgext/convert/__init__.py Wed Aug 08 23:00:01 2007 +0200
@@ -5,48 +5,60 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from common import NoRepo
+from common import NoRepo, converter_source, converter_sink
from cvs import convert_cvs
from git import convert_git
-from hg import convert_mercurial
+from hg import mercurial_source, mercurial_sink
+from subversion import convert_svn, debugsvnlog
-import os
+import os, shlex, shutil
from mercurial import hg, ui, util, commands
+from mercurial.i18n import _
+
+commands.norepo += " convert debugsvnlog"
-commands.norepo += " convert"
+converters = [convert_cvs, convert_git, convert_svn, mercurial_source,
+ mercurial_sink]
-converters = [convert_cvs, convert_git, convert_mercurial]
+def convertsource(ui, path, **opts):
+ for c in converters:
+ try:
+ return c.getcommit and c(ui, path, **opts)
+ except (AttributeError, NoRepo):
+ pass
+ raise util.Abort('%s: unknown repository type' % path)
-def converter(ui, path):
+def convertsink(ui, path):
if not os.path.isdir(path):
raise util.Abort("%s: not a directory" % path)
for c in converters:
try:
- return c(ui, path)
- except NoRepo:
+ return c.putcommit and c(ui, path)
+ except (AttributeError, NoRepo):
pass
- raise util.Abort("%s: unknown repository type" % path)
+ raise util.Abort('%s: unknown repository type' % path)
class convert(object):
- def __init__(self, ui, source, dest, mapfile, opts):
+ def __init__(self, ui, source, dest, revmapfile, filemapper, opts):
self.source = source
self.dest = dest
self.ui = ui
self.opts = opts
self.commitcache = {}
- self.mapfile = mapfile
- self.mapfilefd = None
+ self.revmapfile = revmapfile
+ self.revmapfilefd = None
self.authors = {}
self.authorfile = None
+ self.mapfile = filemapper
self.map = {}
try:
- origmapfile = open(self.mapfile, 'r')
- for l in origmapfile:
+ origrevmapfile = open(self.revmapfile, 'r')
+ for l in origrevmapfile:
sv, dv = l[:-1].split()
self.map[sv] = dv
- origmapfile.close()
+ origrevmapfile.close()
except IOError:
pass
@@ -138,14 +150,14 @@
return s
def mapentry(self, src, dst):
- if self.mapfilefd is None:
+ if self.revmapfilefd is None:
try:
- self.mapfilefd = open(self.mapfile, "a")
+ self.revmapfilefd = open(self.revmapfile, "a")
except IOError, (errno, strerror):
- raise util.Abort("Could not open map file %s: %s, %s\n" % (self.mapfile, errno, strerror))
+ raise util.Abort("Could not open map file %s: %s, %s\n" % (self.revmapfile, errno, strerror))
self.map[src] = dst
- self.mapfilefd.write("%s %s\n" % (src, dst))
- self.mapfilefd.flush()
+ self.revmapfilefd.write("%s %s\n" % (src, dst))
+ self.revmapfilefd.flush()
def writeauthormap(self):
authorfile = self.authorfile
@@ -177,25 +189,38 @@
afile.close()
def copy(self, rev):
- c = self.commitcache[rev]
- files = self.source.getchanges(rev)
+ commit = self.commitcache[rev]
+ do_copies = hasattr(self.dest, 'copyfile')
+ filenames = []
+ files, copies = self.source.getchanges(rev)
for f, v in files:
+ newf = self.mapfile(f)
+ if not newf:
+ continue
+ filenames.append(newf)
try:
data = self.source.getfile(f, v)
except IOError, inst:
- self.dest.delfile(f)
+ self.dest.delfile(newf)
else:
e = self.source.getmode(f, v)
- self.dest.putfile(f, e, data)
+ self.dest.putfile(newf, e, data)
+ if do_copies:
+ if f in copies:
+ copyf = self.mapfile(copies[f])
+ if copyf:
+ # Merely marks that a copy happened.
+ self.dest.copyfile(copyf, newf)
- r = [self.map[v] for v in c.parents]
- f = [f for f, v in files]
- newnode = self.dest.putcommit(f, r, c)
+ parents = [self.map[r] for r in commit.parents]
+ newnode = self.dest.putcommit(filenames, parents, commit)
self.mapentry(rev, newnode)
def convert(self):
try:
+ self.dest.before()
+ self.source.setrevmap(self.map)
self.ui.status("scanning source...\n")
heads = self.source.getheads()
parents = self.walktree(heads)
@@ -235,25 +260,114 @@
self.cleanup()
def cleanup(self):
- if self.mapfilefd:
- self.mapfilefd.close()
+ self.dest.after()
+ if self.revmapfilefd:
+ self.revmapfilefd.close()
+
+def rpairs(name):
+ e = len(name)
+ while e != -1:
+ yield name[:e], name[e+1:]
+ e = name.rfind('/', 0, e)
+
+class filemapper(object):
+ '''Map and filter filenames when importing.
+ A name can be mapped to itself, a new name, or None (omit from new
+ repository).'''
+
+ def __init__(self, ui, path=None):
+ self.ui = ui
+ self.include = {}
+ self.exclude = {}
+ self.rename = {}
+ if path:
+ if self.parse(path):
+ raise util.Abort(_('errors in filemap'))
-def _convert(ui, src, dest=None, mapfile=None, **opts):
- '''Convert a foreign SCM repository to a Mercurial one.
+ def parse(self, path):
+ errs = 0
+ def check(name, mapping, listname):
+ if name in mapping:
+ self.ui.warn(_('%s:%d: %r already in %s list\n') %
+ (lex.infile, lex.lineno, name, listname))
+ return 1
+ return 0
+ lex = shlex.shlex(open(path), path, True)
+ lex.wordchars += '!@#$%^&*()-=+[]{}|;:,./<>?'
+ cmd = lex.get_token()
+ while cmd:
+ if cmd == 'include':
+ name = lex.get_token()
+ errs += check(name, self.exclude, 'exclude')
+ self.include[name] = name
+ elif cmd == 'exclude':
+ name = lex.get_token()
+ errs += check(name, self.include, 'include')
+ errs += check(name, self.rename, 'rename')
+ self.exclude[name] = name
+ elif cmd == 'rename':
+ src = lex.get_token()
+ dest = lex.get_token()
+ errs += check(src, self.exclude, 'exclude')
+ self.rename[src] = dest
+ elif cmd == 'source':
+ errs += self.parse(lex.get_token())
+ else:
+ self.ui.warn(_('%s:%d: unknown directive %r\n') %
+ (lex.infile, lex.lineno, cmd))
+ errs += 1
+ cmd = lex.get_token()
+ return errs
+
+ def lookup(self, name, mapping):
+ for pre, suf in rpairs(name):
+ try:
+ return mapping[pre], pre, suf
+ except KeyError, err:
+ pass
+ return '', name, ''
+
+ def __call__(self, name):
+ if self.include:
+ inc = self.lookup(name, self.include)[0]
+ else:
+ inc = name
+ if self.exclude:
+ exc = self.lookup(name, self.exclude)[0]
+ else:
+ exc = ''
+ if not inc or exc:
+ return None
+ newpre, pre, suf = self.lookup(name, self.rename)
+ if newpre:
+ if newpre == '.':
+ return suf
+ if suf:
+ return newpre + '/' + suf
+ return newpre
+ return name
+
+def _convert(ui, src, dest=None, revmapfile=None, **opts):
+ """Convert a foreign SCM repository to a Mercurial one.
Accepted source formats:
- GIT
- CVS
+ - SVN
Accepted destination formats:
- Mercurial
+ If no revision is given, all revisions will be converted. Otherwise,
+ convert will only import up to the named revision (given in a format
+ understood by the source).
+
If no destination directory name is specified, it defaults to the
basename of the source with '-hg' appended. If the destination
repository doesn't exist, it will be created.
- If <mapfile> isn't given, it will be put in a default location
- (<dest>/.hg/shamap by default). The <mapfile> is a simple text
+ If <revmapfile> isn't given, it will be put in a default location
+ (<dest>/.hg/shamap by default). The <revmapfile> is a simple text
file that maps each source commit ID to the destination ID for
that revision, like so:
<source ID> <destination ID>
@@ -267,19 +381,16 @@
that use unix logins to identify authors (eg: CVS). One line per author
mapping and the line format is:
srcauthor=whatever string you want
- '''
+ """
util._encoding = 'UTF-8'
- srcc = converter(ui, src)
- if not hasattr(srcc, "getcommit"):
- raise util.Abort("%s: can't read from this repo type" % src)
-
if not dest:
dest = hg.defaultdest(src) + "-hg"
ui.status("assuming destination %s\n" % dest)
# Try to be smart and initalize things when required
+ created = False
if os.path.isdir(dest):
if len(os.listdir(dest)) > 0:
try:
@@ -294,29 +405,46 @@
else:
ui.status("initializing destination %s repository\n" % dest)
hg.repository(ui, dest, create=True)
+ created = True
elif os.path.exists(dest):
raise util.Abort("destination %s exists and is not a directory" % dest)
else:
ui.status("initializing destination %s repository\n" % dest)
hg.repository(ui, dest, create=True)
+ created = True
- destc = converter(ui, dest)
- if not hasattr(destc, "putcommit"):
- raise util.Abort("%s: can't write to this repo type" % src)
+ destc = convertsink(ui, dest)
- if not mapfile:
+ try:
+ srcc = convertsource(ui, src, rev=opts.get('rev'))
+ except Exception:
+ if created:
+ shutil.rmtree(dest, True)
+ raise
+
+ if not revmapfile:
try:
- mapfile = destc.mapfile()
+ revmapfile = destc.revmapfile()
except:
- mapfile = os.path.join(destc, "map")
+ revmapfile = os.path.join(destc, "map")
+
- c = convert(ui, srcc, destc, mapfile, opts)
+ c = convert(ui, srcc, destc, revmapfile, filemapper(ui, opts['filemap']),
+ opts)
c.convert()
+
cmdtable = {
"convert":
(_convert,
[('A', 'authors', '', 'username mapping filename'),
+ ('', 'filemap', '', 'remap file names using contents of file'),
+ ('r', 'rev', '', 'import up to target revision REV'),
('', 'datesort', None, 'try to sort changesets by date')],
'hg convert [OPTION]... SOURCE [DEST [MAPFILE]]'),
+ "debugsvnlog":
+ (debugsvnlog,
+ [],
+ 'hg debugsvnlog'),
}
+
--- a/hgext/convert/common.py Wed Aug 08 22:47:30 2007 +0200
+++ b/hgext/convert/common.py Wed Aug 08 23:00:01 2007 +0200
@@ -1,21 +1,46 @@
# common code for the convert extension
+import base64
+import cPickle as pickle
+
+def encodeargs(args):
+ def encodearg(s):
+ lines = base64.encodestring(s)
+ lines = [l.splitlines()[0] for l in lines]
+ return ''.join(lines)
+
+ s = pickle.dumps(args)
+ return encodearg(s)
+
+def decodeargs(s):
+ s = base64.decodestring(s)
+ return pickle.loads(s)
class NoRepo(Exception): pass
class commit(object):
- def __init__(self, **parts):
- for x in "author date desc parents".split():
- if not x in parts:
- raise util.Abort("commit missing field %s" % x)
- self.__dict__.update(parts)
+ def __init__(self, author, date, desc, parents, branch=None, rev=None):
+ self.author = author
+ self.date = date
+ self.desc = desc
+ self.parents = parents
+ self.branch = branch
+ self.rev = rev
class converter_source(object):
"""Conversion source interface"""
- def __init__(self, ui, path):
+ def __init__(self, ui, path, rev=None):
"""Initialize conversion source (or raise NoRepo("message")
exception if path is not a valid repository)"""
- raise NotImplementedError()
+ self.ui = ui
+ self.path = path
+ self.rev = rev
+
+ self.encoding = 'utf-8'
+
+ def setrevmap(self, revmap):
+ """set the map of already-converted revisions"""
+ pass
def getheads(self):
"""Return a list of this repository's heads"""
@@ -30,10 +55,12 @@
raise NotImplementedError()
def getchanges(self, version):
- """Return sorted list of (filename, id) tuples for all files changed in rev.
+ """Returns a tuple of (files, copies)
+ Files is a sorted list of (filename, id) tuples for all files changed
+ in version, where id is the source revision id of the file.
- id just tells us which revision to return in getfile(), e.g. in
- git it's an object hash."""
+ copies is a dictionary of dest: source
+ """
raise NotImplementedError()
def getcommit(self, version):
@@ -44,6 +71,18 @@
"""Return the tags as a dictionary of name: revision"""
raise NotImplementedError()
+ def recode(self, s, encoding=None):
+ if not encoding:
+ encoding = self.encoding or 'utf-8'
+
+ try:
+ return s.decode(encoding).encode("utf-8")
+ except:
+ try:
+ return s.decode("latin-1").encode("utf-8")
+ except:
+ return s.decode(encoding, "replace").encode("utf-8")
+
class converter_sink(object):
"""Conversion sink (target) interface"""
@@ -56,7 +95,7 @@
"""Return a list of this repository's heads"""
raise NotImplementedError()
- def mapfile(self):
+ def revmapfile(self):
"""Path to a file that will contain lines
source_rev_id sink_rev_id
mapping equivalent revision identifiers for each system."""
@@ -94,3 +133,4 @@
"""Put tags into sink.
tags: {tagname: sink_rev_id, ...}"""
raise NotImplementedError()
+
--- a/hgext/convert/cvs.py Wed Aug 08 22:47:30 2007 +0200
+++ b/hgext/convert/cvs.py Wed Aug 08 23:00:01 2007 +0200
@@ -6,9 +6,9 @@
from common import NoRepo, commit, converter_source
class convert_cvs(converter_source):
- def __init__(self, ui, path):
- self.path = path
- self.ui = ui
+ def __init__(self, ui, path, rev=None):
+ super(convert_cvs, self).__init__(ui, path, rev=rev)
+
cvs = os.path.join(path, "CVS")
if not os.path.exists(cvs):
raise NoRepo("couldn't open CVS repo %s" % path)
@@ -29,15 +29,32 @@
if self.changeset:
return
+ maxrev = 0
+ cmd = 'cvsps -A -u --cvs-direct -q'
+ if self.rev:
+ # TODO: handle tags
+ try:
+ # patchset number?
+ maxrev = int(self.rev)
+ except ValueError:
+ try:
+ # date
+ util.parsedate(self.rev, ['%Y/%m/%d %H:%M:%S'])
+ cmd = "%s -d '1970/01/01 00:00:01' -d '%s'" % (cmd, self.rev)
+ except util.Abort:
+ raise util.Abort('revision %s is not a patchset number or date' % self.rev)
+
d = os.getcwd()
try:
os.chdir(self.path)
id = None
state = 0
- for l in os.popen("cvsps -A -u --cvs-direct -q"):
+ for l in os.popen(cmd):
if state == 0: # header
if l.startswith("PatchSet"):
id = l[9:-2]
+ if maxrev and int(id) > maxrev:
+ state = 3
elif l.startswith("Date"):
date = util.parsedate(l[6:-1], ["%Y/%m/%d %H:%M:%S"])
date = util.datestr(date)
@@ -62,8 +79,6 @@
if l == "Members: \n":
files = {}
log = self.recode(log[:-1])
- if log.isspace():
- log = "*** empty log message ***\n"
state = 2
else:
log += l
@@ -85,6 +100,8 @@
rev = l[colon+1:-2]
rev = rev.split("->")[1]
files[file] = rev
+ elif state == 3:
+ continue
self.heads = self.lastbranch.values()
finally:
@@ -237,10 +254,7 @@
files = self.files[rev]
cl = files.items()
cl.sort()
- return cl
-
- def recode(self, text):
- return text.decode(self.encoding, "replace").encode("utf-8")
+ return (cl, {})
def getcommit(self, rev):
return self.changeset[rev]
--- a/hgext/convert/git.py Wed Aug 08 22:47:30 2007 +0200
+++ b/hgext/convert/git.py Wed Aug 08 23:00:01 2007 +0200
@@ -4,32 +4,29 @@
from common import NoRepo, commit, converter_source
-def recode(s):
- try:
- return s.decode("utf-8").encode("utf-8")
- except:
- try:
- return s.decode("latin-1").encode("utf-8")
- except:
- return s.decode("utf-8", "replace").encode("utf-8")
+class convert_git(converter_source):
+ def gitcmd(self, s):
+ return os.popen('GIT_DIR=%s %s' % (self.path, s))
-class convert_git(converter_source):
- def __init__(self, ui, path):
+ def __init__(self, ui, path, rev=None):
+ super(convert_git, self).__init__(ui, path, rev=rev)
+
if os.path.isdir(path + "/.git"):
path += "/.git"
- self.path = path
- self.ui = ui
if not os.path.exists(path + "/objects"):
raise NoRepo("couldn't open GIT repo %s" % path)
+ self.path = path
def getheads(self):
- fh = os.popen("GIT_DIR=%s git-rev-parse --verify HEAD" % self.path)
- return [fh.read()[:-1]]
+ if not self.rev:
+ return self.gitcmd('git-rev-parse --branches').read().splitlines()
+ else:
+ fh = self.gitcmd("git-rev-parse --verify %s" % self.rev)
+ return [fh.read()[:-1]]
def catfile(self, rev, type):
if rev == "0" * 40: raise IOError()
- fh = os.popen("GIT_DIR=%s git-cat-file %s %s 2>/dev/null"
- % (self.path, type, rev))
+ fh = self.gitcmd("git-cat-file %s %s 2>/dev/null" % (type, rev))
return fh.read()
def getfile(self, name, rev):
@@ -40,8 +37,7 @@
def getchanges(self, version):
self.modecache = {}
- fh = os.popen("GIT_DIR=%s git-diff-tree --root -m -r %s"
- % (self.path, version))
+ fh = self.gitcmd("git-diff-tree --root -m -r %s" % version)
changes = []
for l in fh:
if "\t" not in l: continue
@@ -52,13 +48,13 @@
s = (m[1] == "120000")
self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
changes.append((f, h))
- return changes
+ return (changes, {})
def getcommit(self, version):
c = self.catfile(version, "commit") # read the commit hash
end = c.find("\n\n")
message = c[end+2:]
- message = recode(message)
+ message = self.recode(message)
l = c[:end].splitlines()
manifest = l[0].split()[1]
parents = []
@@ -69,13 +65,13 @@
tm, tz = p[-2:]
author = " ".join(p[:-2])
if author[0] == "<": author = author[1:-1]
- author = recode(author)
+ author = self.recode(author)
if n == "committer":
p = v.split()
tm, tz = p[-2:]
committer = " ".join(p[:-2])
if committer[0] == "<": committer = committer[1:-1]
- committer = recode(committer)
+ committer = self.recode(committer)
message += "\ncommitter: %s\n" % committer
if n == "parent": parents.append(v)
@@ -84,12 +80,13 @@
date = tm + " " + str(tz)
author = author or "unknown"
- c = commit(parents=parents, date=date, author=author, desc=message)
+ c = commit(parents=parents, date=date, author=author, desc=message,
+ rev=version)
return c
def gettags(self):
tags = {}
- fh = os.popen('git-ls-remote --tags "%s" 2>/dev/null' % self.path)
+ fh = self.gitcmd('git-ls-remote --tags "%s" 2>/dev/null' % self.path)
prefix = 'refs/tags/'
for line in fh:
line = line.strip()
--- a/hgext/convert/hg.py Wed Aug 08 22:47:30 2007 +0200
+++ b/hgext/convert/hg.py Wed Aug 08 23:00:01 2007 +0200
@@ -1,20 +1,40 @@
# hg backend for convert extension
+# Note for hg->hg conversion: Old versions of Mercurial didn't trim
+# the whitespace from the ends of commit messages, but new versions
+# do. Changesets created by those older versions, then converted, may
+# thus have different hashes for changesets that are otherwise
+# identical.
+
+
import os, time
-from mercurial import hg
+from mercurial.i18n import _
+from mercurial.node import *
+from mercurial import hg, lock, revlog, util
-from common import NoRepo, converter_sink
+from common import NoRepo, commit, converter_source, converter_sink
-class convert_mercurial(converter_sink):
+class mercurial_sink(converter_sink):
def __init__(self, ui, path):
self.path = path
self.ui = ui
try:
self.repo = hg.repository(self.ui, path)
except:
- raise NoRepo("could open hg repo %s" % path)
+ raise NoRepo("could not open hg repo %s as sink" % path)
+ self.lock = None
+ self.wlock = None
+ self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
- def mapfile(self):
+ def before(self):
+ self.wlock = self.repo.wlock()
+ self.lock = self.repo.lock()
+
+ def after(self):
+ self.lock = None
+ self.wlock = None
+
+ def revmapfile(self):
return os.path.join(self.path, ".hg", "shamap")
def authorfile(self):
@@ -22,12 +42,15 @@
def getheads(self):
h = self.repo.changelog.heads()
- return [ hg.hex(x) for x in h ]
+ return [ hex(x) for x in h ]
def putfile(self, f, e, data):
self.repo.wwrite(f, data, e)
- if self.repo.dirstate.state(f) == '?':
- self.repo.dirstate.update([f], "a")
+ if f not in self.repo.dirstate:
+ self.repo.dirstate.add(f)
+
+ def copyfile(self, source, dest):
+ self.repo.copy(source, dest)
def delfile(self, f):
try:
@@ -37,7 +60,10 @@
pass
def putcommit(self, files, parents, commit):
- seen = {}
+ if not files:
+ return hex(self.repo.changelog.tip())
+
+ seen = {hex(nullid): 1}
pl = []
for p in parents:
if p not in seen:
@@ -51,16 +77,17 @@
text = commit.desc
extra = {}
- try:
- extra["branch"] = commit.branch
- except AttributeError:
- pass
+ if self.branchnames and commit.branch:
+ extra['branch'] = commit.branch
+ if commit.rev:
+ extra['convert_revision'] = commit.rev
while parents:
p1 = p2
p2 = parents.pop(0)
a = self.repo.rawcommit(files, text, commit.author, commit.date,
- hg.bin(p1), hg.bin(p2), extra=extra)
+ bin(p1), bin(p2), extra=extra)
+ self.repo.dirstate.invalidate()
text = "(octopus merge fixup)\n"
p2 = hg.hex(self.repo.changelog.tip())
@@ -90,5 +117,62 @@
if not oldlines: self.repo.add([".hgtags"])
date = "%s 0" % int(time.mktime(time.gmtime()))
self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
- date, self.repo.changelog.tip(), hg.nullid)
- return hg.hex(self.repo.changelog.tip())
+ date, self.repo.changelog.tip(), nullid)
+ return hex(self.repo.changelog.tip())
+
+class mercurial_source(converter_source):
+ def __init__(self, ui, path, rev=None):
+ converter_source.__init__(self, ui, path, rev)
+ self.repo = hg.repository(self.ui, path)
+ self.lastrev = None
+ self.lastctx = None
+
+ def changectx(self, rev):
+ if self.lastrev != rev:
+ self.lastctx = self.repo.changectx(rev)
+ self.lastrev = rev
+ return self.lastctx
+
+ def getheads(self):
+ if self.rev:
+ return [hex(self.repo.changectx(self.rev).node())]
+ else:
+ return [hex(node) for node in self.repo.heads()]
+
+ def getfile(self, name, rev):
+ try:
+ return self.changectx(rev).filectx(name).data()
+ except revlog.LookupError, err:
+ raise IOError(err)
+
+ def getmode(self, name, rev):
+ m = self.changectx(rev).manifest()
+ return (m.execf(name) and 'x' or '') + (m.linkf(name) and 'l' or '')
+
+ def getchanges(self, rev):
+ ctx = self.changectx(rev)
+ m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
+ changes = [(name, rev) for name in m + a + r]
+ changes.sort()
+ return (changes, self.getcopies(ctx))
+
+ def getcopies(self, ctx):
+ added = self.repo.status(ctx.parents()[0].node(), ctx.node())[1]
+ copies = {}
+ for name in added:
+ try:
+ copies[name] = ctx.filectx(name).renamed()[0]
+ except TypeError:
+ pass
+ return copies
+
+ def getcommit(self, rev):
+ ctx = self.changectx(rev)
+ parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
+ return commit(author=ctx.user(), date=util.datestr(ctx.date()),
+ desc=ctx.description(), parents=parents,
+ branch=ctx.branch())
+
+ def gettags(self):
+ tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
+ return dict([(name, hex(node)) for name, node in tags])
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/convert/subversion.py Wed Aug 08 23:00:01 2007 +0200
@@ -0,0 +1,646 @@
+# Subversion 1.4/1.5 Python API backend
+#
+# Copyright(C) 2007 Daniel Holth et al
+#
+# Configuration options:
+#
+# convert.svn.trunk
+# Relative path to the trunk (default: "trunk")
+# convert.svn.branches
+# Relative path to tree of branches (default: "branches")
+#
+# Set these in a hgrc, or on the command line as follows:
+#
+# hg convert --config convert.svn.trunk=wackoname [...]
+
+import locale
+import os
+import sys
+import cPickle as pickle
+from mercurial import util
+
+# Subversion stuff. Works best with very recent Python SVN bindings
+# e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
+# these bindings.
+
+from cStringIO import StringIO
+
+from common import NoRepo, commit, converter_source, encodeargs, decodeargs
+
+try:
+ from svn.core import SubversionException, Pool
+ import svn
+ import svn.client
+ import svn.core
+ import svn.ra
+ import svn.delta
+ import transport
+except ImportError:
+ pass
+
+def geturl(path):
+ try:
+ return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
+ except SubversionException:
+ pass
+ if os.path.isdir(path):
+ return 'file://%s' % os.path.normpath(os.path.abspath(path))
+ return path
+
+def optrev(number):
+ optrev = svn.core.svn_opt_revision_t()
+ optrev.kind = svn.core.svn_opt_revision_number
+ optrev.value.number = number
+ return optrev
+
+class changedpath(object):
+ def __init__(self, p):
+ self.copyfrom_path = p.copyfrom_path
+ self.copyfrom_rev = p.copyfrom_rev
+ self.action = p.action
+
+def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
+ strict_node_history=False):
+ protocol = -1
+ def receiver(orig_paths, revnum, author, date, message, pool):
+ if orig_paths is not None:
+ for k, v in orig_paths.iteritems():
+ orig_paths[k] = changedpath(v)
+ pickle.dump((orig_paths, revnum, author, date, message),
+ fp, protocol)
+
+ try:
+ # Use an ra of our own so that our parent can consume
+ # our results without confusing the server.
+ t = transport.SvnRaTransport(url=url)
+ svn.ra.get_log(t.ra, paths, start, end, limit,
+ discover_changed_paths,
+ strict_node_history,
+ receiver)
+ except SubversionException, (inst, num):
+ pickle.dump(num, fp, protocol)
+ else:
+ pickle.dump(None, fp, protocol)
+ fp.close()
+
+def debugsvnlog(ui, **opts):
+ """Fetch SVN log in a subprocess and channel them back to parent to
+ avoid memory collection issues.
+ """
+ util.set_binary(sys.stdin)
+ util.set_binary(sys.stdout)
+ args = decodeargs(sys.stdin.read())
+ get_log_child(sys.stdout, *args)
+
+# SVN conversion code stolen from bzr-svn and tailor
+class convert_svn(converter_source):
+ def __init__(self, ui, url, rev=None):
+ super(convert_svn, self).__init__(ui, url, rev=rev)
+
+ try:
+ SubversionException
+ except NameError:
+ msg = 'subversion python bindings could not be loaded\n'
+ ui.warn(msg)
+ raise NoRepo(msg)
+
+ self.encoding = locale.getpreferredencoding()
+ self.lastrevs = {}
+
+ latest = None
+ try:
+ # Support file://path@rev syntax. Useful e.g. to convert
+ # deleted branches.
+ at = url.rfind('@')
+ if at >= 0:
+ latest = int(url[at+1:])
+ url = url[:at]
+ except ValueError, e:
+ pass
+ self.url = geturl(url)
+ self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
+ try:
+ self.transport = transport.SvnRaTransport(url=self.url)
+ self.ra = self.transport.ra
+ self.ctx = self.transport.client
+ self.base = svn.ra.get_repos_root(self.ra)
+ self.module = self.url[len(self.base):]
+ self.modulemap = {} # revision, module
+ self.commits = {}
+ self.paths = {}
+ self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding)
+ except SubversionException, e:
+ raise NoRepo("couldn't open SVN repo %s" % self.url)
+
+ if rev:
+ try:
+ latest = int(rev)
+ except ValueError:
+ raise util.Abort('svn: revision %s is not an integer' % rev)
+
+ try:
+ self.get_blacklist()
+ except IOError, e:
+ pass
+
+ self.last_changed = self.latest(self.module, latest)
+
+ self.head = self.revid(self.last_changed)
+
+ def setrevmap(self, revmap):
+ lastrevs = {}
+ for revid in revmap.keys():
+ uuid, module, revnum = self.revsplit(revid)
+ lastrevnum = lastrevs.setdefault(module, revnum)
+ if revnum > lastrevnum:
+ lastrevs[module] = revnum
+ self.lastrevs = lastrevs
+
+ def exists(self, path, optrev):
+ try:
+ return svn.client.ls(self.url.rstrip('/') + '/' + path,
+ optrev, False, self.ctx)
+ except SubversionException, err:
+ return []
+
+ def getheads(self):
+ # detect standard /branches, /tags, /trunk layout
+ rev = optrev(self.last_changed)
+ rpath = self.url.strip('/')
+ cfgtrunk = self.ui.config('convert', 'svn.trunk')
+ cfgbranches = self.ui.config('convert', 'svn.branches')
+ trunk = (cfgtrunk or 'trunk').strip('/')
+ branches = (cfgbranches or 'branches').strip('/')
+ if self.exists(trunk, rev) and self.exists(branches, rev):
+ self.ui.note('found trunk at %r and branches at %r\n' %
+ (trunk, branches))
+ oldmodule = self.module
+ self.module += '/' + trunk
+ lt = self.latest(self.module, self.last_changed)
+ self.head = self.revid(lt)
+ self.heads = [self.head]
+ branchnames = svn.client.ls(rpath + '/' + branches, rev, False,
+ self.ctx)
+ for branch in branchnames.keys():
+ if oldmodule:
+ module = '/' + oldmodule + '/' + branches + '/' + branch
+ else:
+ module = '/' + branches + '/' + branch
+ brevnum = self.latest(module, self.last_changed)
+ brev = self.revid(brevnum, module)
+ self.ui.note('found branch %s at %d\n' % (branch, brevnum))
+ self.heads.append(brev)
+ elif cfgtrunk or cfgbranches:
+ raise util.Abort('trunk/branch layout expected, but not found')
+ else:
+ self.ui.note('working with one branch\n')
+ self.heads = [self.head]
+ return self.heads
+
+ def getfile(self, file, rev):
+ data, mode = self._getfile(file, rev)
+ self.modecache[(file, rev)] = mode
+ return data
+
+ def getmode(self, file, rev):
+ return self.modecache[(file, rev)]
+
+ def getchanges(self, rev):
+ self.modecache = {}
+ (paths, parents) = self.paths[rev]
+ files, copies = self.expandpaths(rev, paths, parents)
+ files.sort()
+ files = zip(files, [rev] * len(files))
+
+ # caller caches the result, so free it here to release memory
+ del self.paths[rev]
+ return (files, copies)
+
+ def getcommit(self, rev):
+ if rev not in self.commits:
+ uuid, module, revnum = self.revsplit(rev)
+ self.module = module
+ self.reparent(module)
+ stop = self.lastrevs.get(module, 0)
+ self._fetch_revisions(from_revnum=revnum, to_revnum=stop)
+ commit = self.commits[rev]
+ # caller caches the result, so free it here to release memory
+ del self.commits[rev]
+ return commit
+
+ def get_log(self, paths, start, end, limit=0, discover_changed_paths=True,
+ strict_node_history=False):
+
+ def parent(fp):
+ while True:
+ entry = pickle.load(fp)
+ try:
+ orig_paths, revnum, author, date, message = entry
+ except:
+ if entry is None:
+ break
+ raise SubversionException("child raised exception", entry)
+ yield entry
+
+ args = [self.url, paths, start, end, limit, discover_changed_paths,
+ strict_node_history]
+ arg = encodeargs(args)
+ hgexe = util.hgexecutable()
+ cmd = '%s debugsvnlog' % util.shellquote(hgexe)
+ stdin, stdout = os.popen2(cmd, 'b')
+
+ stdin.write(arg)
+ stdin.close()
+
+ for p in parent(stdout):
+ yield p
+
+ def gettags(self):
+ tags = {}
+ start = self.revnum(self.head)
+ try:
+ for entry in self.get_log(['/tags'], 0, start):
+ orig_paths, revnum, author, date, message = entry
+ for path in orig_paths:
+ if not path.startswith('/tags/'):
+ continue
+ ent = orig_paths[path]
+ source = ent.copyfrom_path
+ rev = ent.copyfrom_rev
+ tag = path.split('/', 2)[2]
+ tags[tag] = self.revid(rev, module=source)
+ except SubversionException, (inst, num):
+ self.ui.note('no tags found at revision %d\n' % start)
+ return tags
+
+ # -- helper functions --
+
+ def revid(self, revnum, module=None):
+ if not module:
+ module = self.module
+ return (u"svn:%s%s@%s" % (self.uuid, module, revnum)).decode(self.encoding)
+
+ def revnum(self, rev):
+ return int(rev.split('@')[-1])
+
+ def revsplit(self, rev):
+ url, revnum = rev.encode(self.encoding).split('@', 1)
+ revnum = int(revnum)
+ parts = url.split('/', 1)
+ uuid = parts.pop(0)[4:]
+ mod = ''
+ if parts:
+ mod = '/' + parts[0]
+ return uuid, mod, revnum
+
+ def latest(self, path, stop=0):
+ 'find the latest revision affecting path, up to stop'
+ if not stop:
+ stop = svn.ra.get_latest_revnum(self.ra)
+ try:
+ self.reparent('')
+ dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
+ self.reparent(self.module)
+ except SubversionException:
+ dirent = None
+ if not dirent:
+ raise util.Abort('%s not found up to revision %d' % (path, stop))
+
+ return dirent.created_rev
+
+ def get_blacklist(self):
+ """Avoid certain revision numbers.
+ It is not uncommon for two nearby revisions to cancel each other
+ out, e.g. 'I copied trunk into a subdirectory of itself instead
+ of making a branch'. The converted repository is significantly
+ smaller if we ignore such revisions."""
+ self.blacklist = set()
+ blacklist = self.blacklist
+ for line in file("blacklist.txt", "r"):
+ if not line.startswith("#"):
+ try:
+ svn_rev = int(line.strip())
+ blacklist.add(svn_rev)
+ except ValueError, e:
+ pass # not an integer or a comment
+
+ def is_blacklisted(self, svn_rev):
+ return svn_rev in self.blacklist
+
+ def reparent(self, module):
+ svn_url = self.base + module
+ self.ui.debug("reparent to %s\n" % svn_url.encode(self.encoding))
+ svn.ra.reparent(self.ra, svn_url.encode(self.encoding))
+
+ def expandpaths(self, rev, paths, parents):
+ def get_entry_from_path(path, module=self.module):
+ # Given the repository url of this wc, say
+ # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
+ # extract the "entry" portion (a relative path) from what
+ # svn log --xml says, ie
+ # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
+ # that is to say "tests/PloneTestCase.py"
+ if path.startswith(module):
+ relative = path[len(module):]
+ if relative.startswith('/'):
+ return relative[1:]
+ else:
+ return relative
+
+ # The path is outside our tracked tree...
+ self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
+ return None
+
+ entries = []
+ copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions.
+ copies = {}
+ revnum = self.revnum(rev)
+
+ if revnum in self.modulemap:
+ new_module = self.modulemap[revnum]
+ if new_module != self.module:
+ self.module = new_module
+ self.reparent(self.module)
+
+ for path, ent in paths:
+ entrypath = get_entry_from_path(path, module=self.module)
+ entry = entrypath.decode(self.encoding)
+
+ kind = svn.ra.check_path(self.ra, entrypath, revnum)
+ if kind == svn.core.svn_node_file:
+ if ent.copyfrom_path:
+ copyfrom_path = get_entry_from_path(ent.copyfrom_path)
+ if copyfrom_path:
+ self.ui.debug("Copied to %s from %s@%s\n" % (entry, copyfrom_path, ent.copyfrom_rev))
+ # It's probably important for hg that the source
+ # exists in the revision's parent, not just the
+ # ent.copyfrom_rev
+ fromkind = svn.ra.check_path(self.ra, copyfrom_path, ent.copyfrom_rev)
+ if fromkind != 0:
+ copies[self.recode(entry)] = self.recode(copyfrom_path)
+ entries.append(self.recode(entry))
+ elif kind == 0: # gone, but had better be a deleted *file*
+ self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
+
+ # if a branch is created but entries are removed in the same
+ # changeset, get the right fromrev
+ if parents:
+ uuid, old_module, fromrev = self.revsplit(parents[0])
+ else:
+ fromrev = revnum - 1
+ # might always need to be revnum - 1 in these 3 lines?
+ old_module = self.modulemap.get(fromrev, self.module)
+
+ basepath = old_module + "/" + get_entry_from_path(path, module=self.module)
+ entrypath = old_module + "/" + get_entry_from_path(path, module=self.module)
+
+ def lookup_parts(p):
+ rc = None
+ parts = p.split("/")
+ for i in range(len(parts)):
+ part = "/".join(parts[:i])
+ info = part, copyfrom.get(part, None)
+ if info[1] is not None:
+ self.ui.debug("Found parent directory %s\n" % info[1])
+ rc = info
+ return rc
+
+ self.ui.debug("base, entry %s %s\n" % (basepath, entrypath))
+
+ frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
+
+ # need to remove fragment from lookup_parts and replace with copyfrom_path
+ if frompath is not None:
+ self.ui.debug("munge-o-matic\n")
+ self.ui.debug(entrypath + '\n')
+ self.ui.debug(entrypath[len(frompath):] + '\n')
+ entrypath = froment.copyfrom_path + entrypath[len(frompath):]
+ fromrev = froment.copyfrom_rev
+ self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath))
+
+ fromkind = svn.ra.check_path(self.ra, entrypath, fromrev)
+ if fromkind == svn.core.svn_node_file: # a deleted file
+ entries.append(self.recode(entry))
+ elif fromkind == svn.core.svn_node_dir:
+ # print "Deleted/moved non-file:", revnum, path, ent
+ # children = self._find_children(path, revnum - 1)
+ # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action)
+ # Sometimes this is tricky. For example: in
+ # The Subversion Repository revision 6940 a dir
+ # was copied and one of its files was deleted
+ # from the new location in the same commit. This
+ # code can't deal with that yet.
+ if ent.action == 'C':
+ children = self._find_children(path, fromrev)
+ else:
+ oroot = entrypath.strip('/')
+ nroot = path.strip('/')
+ children = self._find_children(oroot, fromrev)
+ children = [s.replace(oroot,nroot) for s in children]
+ # Mark all [files, not directories] as deleted.
+ for child in children:
+ # Can we move a child directory and its
+ # parent in the same commit? (probably can). Could
+ # cause problems if instead of revnum -1,
+ # we have to look in (copyfrom_path, revnum - 1)
+ entrypath = get_entry_from_path("/" + child, module=old_module)
+ if entrypath:
+ entry = self.recode(entrypath.decode(self.encoding))
+ if entry in copies:
+ # deleted file within a copy
+ del copies[entry]
+ else:
+ entries.append(entry)
+ else:
+ self.ui.debug('unknown path in revision %d: %s\n' % \
+ (revnum, path))
+ elif kind == svn.core.svn_node_dir:
+ # Should probably synthesize normal file entries
+ # and handle as above to clean up copy/rename handling.
+
+ # If the directory just had a prop change,
+ # then we shouldn't need to look for its children.
+ # Also this could create duplicate entries. Not sure
+ # whether this will matter. Maybe should make entries a set.
+ # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev
+ # This will fail if a directory was copied
+ # from another branch and then some of its files
+ # were deleted in the same transaction.
+ children = self._find_children(path, revnum)
+ children.sort()
+ for child in children:
+ # Can we move a child directory and its
+ # parent in the same commit? (probably can). Could
+ # cause problems if instead of revnum -1,
+ # we have to look in (copyfrom_path, revnum - 1)
+ entrypath = get_entry_from_path("/" + child, module=self.module)
+ # print child, self.module, entrypath
+ if entrypath:
+ # Need to filter out directories here...
+ kind = svn.ra.check_path(self.ra, entrypath, revnum)
+ if kind != svn.core.svn_node_dir:
+ entries.append(self.recode(entrypath))
+
+ # Copies here (must copy all from source)
+ # Probably not a real problem for us if
+ # source does not exist
+
+ # Can do this with the copy command "hg copy"
+ # if ent.copyfrom_path:
+ # copyfrom_entry = get_entry_from_path(ent.copyfrom_path.decode(self.encoding),
+ # module=self.module)
+ # copyto_entry = entrypath
+ #
+ # print "copy directory", copyfrom_entry, 'to', copyto_entry
+ #
+ # copies.append((copyfrom_entry, copyto_entry))
+
+ if ent.copyfrom_path:
+ copyfrom_path = ent.copyfrom_path.decode(self.encoding)
+ copyfrom_entry = get_entry_from_path(copyfrom_path, module=self.module)
+ if copyfrom_entry:
+ copyfrom[path] = ent
+ self.ui.debug("mark %s came from %s\n" % (path, copyfrom[path]))
+
+ # Good, /probably/ a regular copy. Really should check
+ # to see whether the parent revision actually contains
+ # the directory in question.
+ children = self._find_children(self.recode(copyfrom_path), ent.copyfrom_rev)
+ children.sort()
+ for child in children:
+ entrypath = get_entry_from_path("/" + child, module=self.module)
+ if entrypath:
+ entry = entrypath.decode(self.encoding)
+ # print "COPY COPY From", copyfrom_entry, entry
+ copyto_path = path + entry[len(copyfrom_entry):]
+ copyto_entry = get_entry_from_path(copyto_path, module=self.module)
+ # print "COPY", entry, "COPY To", copyto_entry
+ copies[self.recode(copyto_entry)] = self.recode(entry)
+ # copy from quux splort/quuxfile
+
+ return (entries, copies)
+
+ def _fetch_revisions(self, from_revnum = 0, to_revnum = 347):
+ self.child_cset = None
+ def parselogentry(orig_paths, revnum, author, date, message):
+ self.ui.debug("parsing revision %d (%d changes)\n" %
+ (revnum, len(orig_paths)))
+
+ if revnum in self.modulemap:
+ new_module = self.modulemap[revnum]
+ if new_module != self.module:
+ self.module = new_module
+ self.reparent(self.module)
+
+ rev = self.revid(revnum)
+ # branch log might return entries for a parent we already have
+ if (rev in self.commits or
+ (revnum < self.lastrevs.get(self.module, 0))):
+ return
+
+ parents = []
+ orig_paths = orig_paths.items()
+ orig_paths.sort()
+
+ # check whether this revision is the start of a branch
+ path, ent = orig_paths and orig_paths[0] or (None, None)
+ if ent and path == self.module:
+ if ent.copyfrom_path:
+ # ent.copyfrom_rev may not be the actual last revision
+ prev = self.latest(ent.copyfrom_path, ent.copyfrom_rev)
+ self.modulemap[prev] = ent.copyfrom_path
+ parents = [self.revid(prev, ent.copyfrom_path)]
+ self.ui.note('found parent of branch %s at %d: %s\n' % \
+ (self.module, prev, ent.copyfrom_path))
+ else:
+ self.ui.debug("No copyfrom path, don't know what to do.\n")
+
+ self.modulemap[revnum] = self.module # track backwards in time
+
+ paths = []
+ # filter out unrelated paths
+ for path, ent in orig_paths:
+ if not path.startswith(self.module):
+ self.ui.debug("boring@%s: %s\n" % (revnum, path))
+ continue
+ paths.append((path, ent))
+
+ self.paths[rev] = (paths, parents)
+
+ # Example SVN datetime. Includes microseconds.
+ # ISO-8601 conformant
+ # '2007-01-04T17:35:00.902377Z'
+ date = util.parsedate(date[:18] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
+
+ log = message and self.recode(message)
+ author = author and self.recode(author) or ''
+ try:
+ branch = self.module.split("/")[-1]
+ if branch == 'trunk':
+ branch = ''
+ except IndexError:
+ branch = None
+
+ cset = commit(author=author,
+ date=util.datestr(date),
+ desc=log,
+ parents=parents,
+ branch=branch,
+ rev=rev.encode('utf-8'))
+
+ self.commits[rev] = cset
+ if self.child_cset and not self.child_cset.parents:
+ self.child_cset.parents = [rev]
+ self.child_cset = cset
+
+ self.ui.note('fetching revision log for "%s" from %d to %d\n' %
+ (self.module, from_revnum, to_revnum))
+
+ try:
+ for entry in self.get_log([self.module], from_revnum, to_revnum):
+ orig_paths, revnum, author, date, message = entry
+ if self.is_blacklisted(revnum):
+ self.ui.note('skipping blacklisted revision %d\n' % revnum)
+ continue
+ if orig_paths is None:
+ self.ui.debug('revision %d has no entries\n' % revnum)
+ continue
+ parselogentry(orig_paths, revnum, author, date, message)
+ except SubversionException, (inst, num):
+ if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
+ raise NoSuchRevision(branch=self,
+ revision="Revision number %d" % to_revnum)
+ raise
+
+ def _getfile(self, file, rev):
+ io = StringIO()
+ # TODO: ra.get_file transmits the whole file instead of diffs.
+ mode = ''
+ try:
+ revnum = self.revnum(rev)
+ if self.module != self.modulemap[revnum]:
+ self.module = self.modulemap[revnum]
+ self.reparent(self.module)
+ info = svn.ra.get_file(self.ra, file, revnum, io)
+ if isinstance(info, list):
+ info = info[-1]
+ mode = ("svn:executable" in info) and 'x' or ''
+ mode = ("svn:special" in info) and 'l' or mode
+ except SubversionException, e:
+ notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
+ svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
+ if e.apr_err in notfound: # File not found
+ raise IOError()
+ raise
+ data = io.getvalue()
+ if mode == 'l':
+ link_prefix = "link "
+ if data.startswith(link_prefix):
+ data = data[len(link_prefix):]
+ return data, mode
+
+ def _find_children(self, path, revnum):
+ path = path.strip('/')
+ pool = Pool()
+ rpath = '/'.join([self.base, path]).strip('/')
+ return ['%s/%s' % (path, x) for x in svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/convert/transport.py Wed Aug 08 23:00:01 2007 +0200
@@ -0,0 +1,129 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2007 Daniel Holth <dholth@fastmail.fm>
+# This is a stripped-down version of the original bzr-svn transport.py,
+# Copyright (C) 2006 Jelmer Vernooij <jelmer@samba.org>
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+from cStringIO import StringIO
+import os
+from tempfile import mktemp
+
+from svn.core import SubversionException, Pool
+import svn.ra
+import svn.client
+import svn.core
+
+# Some older versions of the Python bindings need to be
+# explicitly initialized. But what we want to do probably
+# won't work worth a darn against those libraries anyway!
+svn.ra.initialize()
+
+svn_config = svn.core.svn_config_get_config(None)
+
+
+def _create_auth_baton(pool):
+ """Create a Subversion authentication baton. """
+ import svn.client
+ # Give the client context baton a suite of authentication
+    # providers.
+ providers = [
+ svn.client.get_simple_provider(pool),
+ svn.client.get_username_provider(pool),
+ svn.client.get_ssl_client_cert_file_provider(pool),
+ svn.client.get_ssl_client_cert_pw_file_provider(pool),
+ svn.client.get_ssl_server_trust_file_provider(pool),
+ ]
+    # Platform-dependent authentication methods
+ if hasattr(svn.client, 'get_windows_simple_provider'):
+ providers.append(svn.client.get_windows_simple_provider(pool))
+
+ return svn.core.svn_auth_open(providers, pool)
+
+class NotBranchError(SubversionException):
+ pass
+
+class SvnRaTransport(object):
+ """
+ Open an ra connection to a Subversion repository.
+ """
+ def __init__(self, url="", ra=None):
+ self.pool = Pool()
+ self.svn_url = url
+ self.username = ''
+ self.password = ''
+
+ # Only Subversion 1.4 has reparent()
+ if ra is None or not hasattr(svn.ra, 'reparent'):
+ self.client = svn.client.create_context(self.pool)
+ ab = _create_auth_baton(self.pool)
+ if False:
+ svn.core.svn_auth_set_parameter(
+ ab, svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME, self.username)
+ svn.core.svn_auth_set_parameter(
+ ab, svn.core.SVN_AUTH_PARAM_DEFAULT_PASSWORD, self.password)
+ self.client.auth_baton = ab
+ self.client.config = svn_config
+ try:
+ self.ra = svn.client.open_ra_session(
+ self.svn_url.encode('utf8'),
+ self.client, self.pool)
+ except SubversionException, (inst, num):
+ if num in (svn.core.SVN_ERR_RA_ILLEGAL_URL,
+ svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED,
+ svn.core.SVN_ERR_BAD_URL):
+ raise NotBranchError(url)
+ raise
+ else:
+ self.ra = ra
+ svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))
+
+ class Reporter:
+ def __init__(self, (reporter, report_baton)):
+ self._reporter = reporter
+ self._baton = report_baton
+
+ def set_path(self, path, revnum, start_empty, lock_token, pool=None):
+ svn.ra.reporter2_invoke_set_path(self._reporter, self._baton,
+ path, revnum, start_empty, lock_token, pool)
+
+ def delete_path(self, path, pool=None):
+ svn.ra.reporter2_invoke_delete_path(self._reporter, self._baton,
+ path, pool)
+
+ def link_path(self, path, url, revision, start_empty, lock_token,
+ pool=None):
+ svn.ra.reporter2_invoke_link_path(self._reporter, self._baton,
+ path, url, revision, start_empty, lock_token,
+ pool)
+
+ def finish_report(self, pool=None):
+ svn.ra.reporter2_invoke_finish_report(self._reporter,
+ self._baton, pool)
+
+ def abort_report(self, pool=None):
+ svn.ra.reporter2_invoke_abort_report(self._reporter,
+ self._baton, pool)
+
+ def do_update(self, revnum, path, *args, **kwargs):
+ return self.Reporter(svn.ra.do_update(self.ra, revnum, path, *args, **kwargs))
+
+ def clone(self, offset=None):
+ """See Transport.clone()."""
+ if offset is None:
+ return self.__class__(self.base)
+
+ return SvnRaTransport(urlutils.join(self.base, offset), ra=self.ra)
--- a/hgext/extdiff.py Wed Aug 08 22:47:30 2007 +0200
+++ b/hgext/extdiff.py Wed Aug 08 23:00:01 2007 +0200
@@ -50,60 +50,61 @@
from mercurial.i18n import _
from mercurial.node import *
-from mercurial import cmdutil, util
+from mercurial import cmdutil, util, commands
import os, shutil, tempfile
+
+def snapshot_node(ui, repo, files, node, tmproot):
+ '''snapshot files as of some revision'''
+ mf = repo.changectx(node).manifest()
+ dirname = os.path.basename(repo.root)
+ if dirname == "":
+ dirname = "root"
+ dirname = '%s.%s' % (dirname, short(node))
+ base = os.path.join(tmproot, dirname)
+ os.mkdir(base)
+ ui.note(_('making snapshot of %d files from rev %s\n') %
+ (len(files), short(node)))
+ for fn in files:
+ if not fn in mf:
+ # skipping new file after a merge ?
+ continue
+ wfn = util.pconvert(fn)
+ ui.note(' %s\n' % wfn)
+ dest = os.path.join(base, wfn)
+ destdir = os.path.dirname(dest)
+ if not os.path.isdir(destdir):
+ os.makedirs(destdir)
+ data = repo.wwritedata(wfn, repo.file(wfn).read(mf[wfn]))
+ open(dest, 'wb').write(data)
+ return dirname
+
+
+def snapshot_wdir(ui, repo, files, tmproot):
+ '''snapshot files from working directory.
+ if not using snapshot, -I/-X does not work and recursive diff
+ in tools like kdiff3 and meld displays too many files.'''
+ dirname = os.path.basename(repo.root)
+ if dirname == "":
+ dirname = "root"
+ base = os.path.join(tmproot, dirname)
+ os.mkdir(base)
+ ui.note(_('making snapshot of %d files from working dir\n') %
+ (len(files)))
+ for fn in files:
+ wfn = util.pconvert(fn)
+ ui.note(' %s\n' % wfn)
+ dest = os.path.join(base, wfn)
+ destdir = os.path.dirname(dest)
+ if not os.path.isdir(destdir):
+ os.makedirs(destdir)
+ fp = open(dest, 'wb')
+ for chunk in util.filechunkiter(repo.wopener(wfn)):
+ fp.write(chunk)
+ return dirname
+
+
def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
- def snapshot_node(files, node):
- '''snapshot files as of some revision'''
- mf = repo.changectx(node).manifest()
- dirname = os.path.basename(repo.root)
- if dirname == "":
- dirname = "root"
- dirname = '%s.%s' % (dirname, short(node))
- base = os.path.join(tmproot, dirname)
- os.mkdir(base)
- if not ui.quiet:
- ui.write_err(_('making snapshot of %d files from rev %s\n') %
- (len(files), short(node)))
- for fn in files:
- if not fn in mf:
- # skipping new file after a merge ?
- continue
- wfn = util.pconvert(fn)
- ui.note(' %s\n' % wfn)
- dest = os.path.join(base, wfn)
- destdir = os.path.dirname(dest)
- if not os.path.isdir(destdir):
- os.makedirs(destdir)
- data = repo.wwritedata(wfn, repo.file(wfn).read(mf[wfn]))
- open(dest, 'wb').write(data)
- return dirname
-
- def snapshot_wdir(files):
- '''snapshot files from working directory.
- if not using snapshot, -I/-X does not work and recursive diff
- in tools like kdiff3 and meld displays too many files.'''
- dirname = os.path.basename(repo.root)
- if dirname == "":
- dirname = "root"
- base = os.path.join(tmproot, dirname)
- os.mkdir(base)
- if not ui.quiet:
- ui.write_err(_('making snapshot of %d files from working dir\n') %
- (len(files)))
- for fn in files:
- wfn = util.pconvert(fn)
- ui.note(' %s\n' % wfn)
- dest = os.path.join(base, wfn)
- destdir = os.path.dirname(dest)
- if not os.path.isdir(destdir):
- os.makedirs(destdir)
- fp = open(dest, 'wb')
- for chunk in util.filechunkiter(repo.wopener(wfn)):
- fp.write(chunk)
- return dirname
-
node1, node2 = cmdutil.revpair(repo, opts['rev'])
files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
modified, added, removed, deleted, unknown = repo.status(
@@ -112,12 +113,34 @@
return 0
tmproot = tempfile.mkdtemp(prefix='extdiff.')
+ dir2root = ''
try:
- dir1 = snapshot_node(modified + removed, node1)
+ # Always make a copy of node1
+ dir1 = snapshot_node(ui, repo, modified + removed, node1, tmproot)
+ changes = len(modified) + len(removed) + len(added)
+
+        # If node2 is not the wc or there is >1 change, copy it
if node2:
- dir2 = snapshot_node(modified + added, node2)
+ dir2 = snapshot_node(ui, repo, modified + added, node2, tmproot)
+ elif changes > 1:
+ dir2 = snapshot_wdir(ui, repo, modified + added, tmproot)
else:
- dir2 = snapshot_wdir(modified + added)
+ # This lets the diff tool open the changed file directly
+ dir2 = ''
+ dir2root = repo.root
+
+ # If only one change, diff the files instead of the directories
+ if changes == 1 :
+ if len(modified):
+ dir1 = os.path.join(dir1, util.localpath(modified[0]))
+ dir2 = os.path.join(dir2root, dir2, util.localpath(modified[0]))
+ elif len(removed) :
+ dir1 = os.path.join(dir1, util.localpath(removed[0]))
+ dir2 = os.devnull
+ else:
+ dir1 = os.devnull
+ dir2 = os.path.join(dir2root, dir2, util.localpath(added[0]))
+
cmdline = ('%s %s %s %s' %
(util.shellquote(diffcmd), ' '.join(diffopts),
util.shellquote(dir1), util.shellquote(dir2)))
@@ -158,8 +181,7 @@
[('p', 'program', '', _('comparison program to run')),
('o', 'option', [], _('pass option to comparison program')),
('r', 'rev', [], _('revision')),
- ('I', 'include', [], _('include names matching the given patterns')),
- ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ ] + commands.walkopts,
_('hg extdiff [OPT]... [FILE]...')),
}
--- a/hgext/fetch.py Wed Aug 08 22:47:30 2007 +0200
+++ b/hgext/fetch.py Wed Aug 08 23:00:01 2007 +0200
@@ -23,29 +23,29 @@
if modheads == 0:
return 0
if modheads == 1:
- return hg.clean(repo, repo.changelog.tip(), wlock=wlock)
+ return hg.clean(repo, repo.changelog.tip())
newheads = repo.heads(parent)
newchildren = [n for n in repo.heads(parent) if n != parent]
newparent = parent
if newchildren:
newparent = newchildren[0]
- hg.clean(repo, newparent, wlock=wlock)
+ hg.clean(repo, newparent)
newheads = [n for n in repo.heads() if n != newparent]
err = False
if newheads:
ui.status(_('merging with new head %d:%s\n') %
(repo.changelog.rev(newheads[0]), short(newheads[0])))
- err = hg.merge(repo, newheads[0], remind=False, wlock=wlock)
+ err = hg.merge(repo, newheads[0], remind=False)
if not err and len(newheads) > 1:
ui.status(_('not merging with %d other new heads '
'(use "hg heads" and "hg merge" to merge them)') %
(len(newheads) - 1))
if not err:
- mod, add, rem = repo.status(wlock=wlock)[:3]
+ mod, add, rem = repo.status()[:3]
message = (cmdutil.logmessage(opts) or
(_('Automated merge with %s') % other.url()))
n = repo.commit(mod + add + rem, message,
- opts['user'], opts['date'], lock=lock, wlock=wlock,
+ opts['user'], opts['date'],
force_editor=opts.get('force_editor'))
ui.status(_('new changeset %d:%s merges remote changes '
'with local\n') % (repo.changelog.rev(n),
@@ -60,7 +60,7 @@
raise util.Abort(_("fetch -r doesn't work for remote repositories yet"))
elif opts['rev']:
revs = [other.lookup(rev) for rev in opts['rev']]
- modheads = repo.pull(other, heads=revs, lock=lock)
+ modheads = repo.pull(other, heads=revs)
return postincoming(other, modheads)
parent, p2 = repo.dirstate.parents()
@@ -69,10 +69,11 @@
'(use "hg update" to check out tip)'))
if p2 != nullid:
raise util.Abort(_('outstanding uncommitted merge'))
- wlock = repo.wlock()
- lock = repo.lock()
+ wlock = lock = None
try:
- mod, add, rem = repo.status(wlock=wlock)[:3]
+ wlock = repo.wlock()
+ lock = repo.lock()
+ mod, add, rem = repo.status()[:3]
if mod or add or rem:
raise util.Abort(_('outstanding uncommitted changes'))
if len(repo.heads()) > 1:
@@ -80,19 +81,13 @@
'(use "hg heads" and "hg merge" to merge)'))
return pull()
finally:
- lock.release()
- wlock.release()
+ del lock, wlock
cmdtable = {
'fetch':
(fetch,
- [('e', 'ssh', '', _('specify ssh command to use')),
- ('m', 'message', '', _('use <text> as commit message')),
- ('l', 'logfile', '', _('read the commit message from <file>')),
- ('d', 'date', '', _('record datecode as commit date')),
- ('u', 'user', '', _('record user as commiter')),
- ('r', 'rev', [], _('a specific revision you would like to pull')),
+ [('r', 'rev', [], _('a specific revision you would like to pull')),
('f', 'force-editor', None, _('edit commit message')),
- ('', 'remotecmd', '', _('hg command to run on the remote side'))],
+ ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
_('hg fetch [SOURCE]')),
}
--- a/hgext/gpg.py Wed Aug 08 22:47:30 2007 +0200
+++ b/hgext/gpg.py Wed Aug 08 23:00:01 2007 +0200
@@ -6,7 +6,7 @@
# of the GNU General Public License, incorporated herein by reference.
import os, tempfile, binascii
-from mercurial import util
+from mercurial import util, commands
from mercurial import node as hgnode
from mercurial.i18n import _
@@ -240,7 +240,7 @@
repo.wfile(".hgsigs", "ab").write(sigmessage)
- if repo.dirstate.state(".hgsigs") == '?':
+ if '.hgsigs' not in repo.dirstate:
repo.add([".hgsigs"])
if opts["no_commit"]:
@@ -269,10 +269,9 @@
[('l', 'local', None, _('make the signature local')),
('f', 'force', None, _('sign even if the sigfile is modified')),
('', 'no-commit', None, _('do not commit the sigfile after signing')),
+ ('k', 'key', '', _('the key id to sign with')),
('m', 'message', '', _('commit message')),
- ('d', 'date', '', _('date code')),
- ('u', 'user', '', _('user')),
- ('k', 'key', '', _('the key id to sign with'))],
+ ] + commands.commitopts2,
_('hg sign [OPTION]... [REVISION]...')),
"sigcheck": (check, [], _('hg sigcheck REVISION')),
"sigs": (sigs, [], _('hg sigs')),
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/imerge.py Wed Aug 08 23:00:01 2007 +0200
@@ -0,0 +1,361 @@
+# Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
+# Published under the GNU GPL
+
+'''
+imerge - interactive merge
+'''
+
+from mercurial.i18n import _
+from mercurial.node import *
+from mercurial import commands, cmdutil, fancyopts, hg, merge, util
+import os, tarfile
+
+class InvalidStateFileException(Exception): pass
+
+class ImergeStateFile(object):
+ def __init__(self, im):
+ self.im = im
+
+ def save(self, dest):
+ tf = tarfile.open(dest, 'w:gz')
+
+ st = os.path.join(self.im.path, 'status')
+ tf.add(st, os.path.join('.hg', 'imerge', 'status'))
+
+ for f in self.im.resolved:
+ (fd, fo) = self.im.conflicts[f]
+ abssrc = self.im.repo.wjoin(fd)
+ tf.add(abssrc, fd)
+
+ tf.close()
+
+ def load(self, source):
+ wlock = self.im.repo.wlock()
+ lock = self.im.repo.lock()
+
+ tf = tarfile.open(source, 'r')
+ contents = tf.getnames()
+ statusfile = os.path.join('.hg', 'imerge', 'status')
+ if statusfile not in contents:
+ raise InvalidStateFileException('no status file')
+
+ tf.extract(statusfile, self.im.repo.root)
+ p1, p2 = self.im.load()
+ if self.im.repo.dirstate.parents()[0] != p1.node():
+ hg.clean(self.im.repo, p1.node())
+ self.im.start(p2.node())
+ for tarinfo in tf:
+ tf.extract(tarinfo, self.im.repo.root)
+ self.im.load()
+
+class Imerge(object):
+ def __init__(self, ui, repo):
+ self.ui = ui
+ self.repo = repo
+
+ self.path = repo.join('imerge')
+ self.opener = util.opener(self.path)
+
+ self.wctx = self.repo.workingctx()
+ self.conflicts = {}
+ self.resolved = []
+
+ def merging(self):
+ return len(self.wctx.parents()) > 1
+
+ def load(self):
+ # status format. \0-delimited file, fields are
+ # p1, p2, conflict count, conflict filenames, resolved filenames
+ # conflict filenames are tuples of localname, remoteorig, remotenew
+
+ statusfile = self.opener('status')
+
+ status = statusfile.read().split('\0')
+ if len(status) < 3:
+ raise util.Abort('invalid imerge status file')
+
+ try:
+ parents = [self.repo.changectx(n) for n in status[:2]]
+ except LookupError:
+ raise util.Abort('merge parent %s not in repository' % short(p))
+
+ status = status[2:]
+ conflicts = int(status.pop(0)) * 3
+ self.resolved = status[conflicts:]
+ for i in xrange(0, conflicts, 3):
+ self.conflicts[status[i]] = (status[i+1], status[i+2])
+
+ return parents
+
+ def save(self):
+ lock = self.repo.lock()
+
+ if not os.path.isdir(self.path):
+ os.mkdir(self.path)
+ statusfile = self.opener('status', 'wb')
+
+ out = [hex(n.node()) for n in self.wctx.parents()]
+ out.append(str(len(self.conflicts)))
+ conflicts = self.conflicts.items()
+ conflicts.sort()
+ for fw, fd_fo in conflicts:
+ out.append(fw)
+ out.extend(fd_fo)
+ out.extend(self.resolved)
+
+ statusfile.write('\0'.join(out))
+
+ def remaining(self):
+ return [f for f in self.conflicts if f not in self.resolved]
+
+ def filemerge(self, fn):
+ wlock = self.repo.wlock()
+
+ (fd, fo) = self.conflicts[fn]
+ p2 = self.wctx.parents()[1]
+ return merge.filemerge(self.repo, fn, fd, fo, self.wctx, p2)
+
+ def start(self, rev=None):
+ _filemerge = merge.filemerge
+ def filemerge(repo, fw, fd, fo, wctx, mctx):
+ self.conflicts[fw] = (fd, fo)
+
+ merge.filemerge = filemerge
+ commands.merge(self.ui, self.repo, rev=rev)
+ merge.filemerge = _filemerge
+
+ self.wctx = self.repo.workingctx()
+ self.save()
+
+ def resume(self):
+ self.load()
+
+ dp = self.repo.dirstate.parents()
+ p1, p2 = self.wctx.parents()
+ if p1.node() != dp[0] or p2.node() != dp[1]:
+ raise util.Abort('imerge state does not match working directory')
+
+ def next(self):
+ remaining = self.remaining()
+ return remaining and remaining[0]
+
+ def resolve(self, files):
+ resolved = dict.fromkeys(self.resolved)
+ for fn in files:
+ if fn not in self.conflicts:
+ raise util.Abort('%s is not in the merge set' % fn)
+ resolved[fn] = True
+ self.resolved = resolved.keys()
+ self.resolved.sort()
+ self.save()
+ return 0
+
+ def unresolve(self, files):
+ resolved = dict.fromkeys(self.resolved)
+ for fn in files:
+ if fn not in resolved:
+ raise util.Abort('%s is not resolved' % fn)
+ del resolved[fn]
+ self.resolved = resolved.keys()
+ self.resolved.sort()
+ self.save()
+ return 0
+
+ def pickle(self, dest):
+ '''write current merge state to file to be resumed elsewhere'''
+ state = ImergeStateFile(self)
+ return state.save(dest)
+
+ def unpickle(self, source):
+ '''read merge state from file'''
+ state = ImergeStateFile(self)
+ return state.load(source)
+
+def load(im, source):
+ if im.merging():
+ raise util.Abort('there is already a merge in progress '
+ '(update -C <rev> to abort it)' )
+ m, a, r, d = im.repo.status()[:4]
+ if m or a or r or d:
+ raise util.Abort('working directory has uncommitted changes')
+
+ rc = im.unpickle(source)
+ if not rc:
+ status(im)
+ return rc
+
+def merge_(im, filename=None):
+ if not filename:
+ filename = im.next()
+ if not filename:
+ im.ui.write('all conflicts resolved\n')
+ return 0
+
+ rc = im.filemerge(filename)
+ if not rc:
+ im.resolve([filename])
+ if not im.next():
+ im.ui.write('all conflicts resolved\n')
+ return 0
+ return rc
+
+def next(im):
+ n = im.next()
+ if n:
+ im.ui.write('%s\n' % n)
+ else:
+ im.ui.write('all conflicts resolved\n')
+ return 0
+
+def resolve(im, *files):
+ if not files:
+ raise util.Abort('resolve requires at least one filename')
+ return im.resolve(files)
+
+def save(im, dest):
+ return im.pickle(dest)
+
+def status(im, **opts):
+ if not opts.get('resolved') and not opts.get('unresolved'):
+ opts['resolved'] = True
+ opts['unresolved'] = True
+
+ if im.ui.verbose:
+ p1, p2 = [short(p.node()) for p in im.wctx.parents()]
+ im.ui.note(_('merging %s and %s\n') % (p1, p2))
+
+ conflicts = im.conflicts.keys()
+ conflicts.sort()
+ remaining = dict.fromkeys(im.remaining())
+ st = []
+ for fn in conflicts:
+ if opts.get('no_status'):
+ mode = ''
+ elif fn in remaining:
+ mode = 'U '
+ else:
+ mode = 'R '
+ if ((opts.get('resolved') and fn not in remaining)
+ or (opts.get('unresolved') and fn in remaining)):
+ st.append((mode, fn))
+ st.sort()
+ for (mode, fn) in st:
+ if im.ui.verbose:
+ fo, fd = im.conflicts[fn]
+ if fd != fn:
+ fn = '%s (%s)' % (fn, fd)
+ im.ui.write('%s%s\n' % (mode, fn))
+ if opts.get('unresolved') and not remaining:
+ im.ui.write(_('all conflicts resolved\n'))
+
+ return 0
+
+def unresolve(im, *files):
+ if not files:
+ raise util.Abort('unresolve requires at least one filename')
+ return im.unresolve(files)
+
+subcmdtable = {
+ 'load': (load, []),
+ 'merge': (merge_, []),
+ 'next': (next, []),
+ 'resolve': (resolve, []),
+ 'save': (save, []),
+ 'status': (status,
+ [('n', 'no-status', None, _('hide status prefix')),
+ ('', 'resolved', None, _('only show resolved conflicts')),
+ ('', 'unresolved', None, _('only show unresolved conflicts'))]),
+ 'unresolve': (unresolve, [])
+}
+
+def dispatch(im, args, opts):
+ def complete(s, choices):
+ candidates = []
+ for choice in choices:
+ if choice.startswith(s):
+ candidates.append(choice)
+ return candidates
+
+ c, args = args[0], list(args[1:])
+ cmd = complete(c, subcmdtable.keys())
+ if not cmd:
+ raise cmdutil.UnknownCommand('imerge ' + c)
+ if len(cmd) > 1:
+ cmd.sort()
+ raise cmdutil.AmbiguousCommand('imerge ' + c, cmd)
+ cmd = cmd[0]
+
+ func, optlist = subcmdtable[cmd]
+ opts = {}
+ try:
+ args = fancyopts.fancyopts(args, optlist, opts)
+ return func(im, *args, **opts)
+ except fancyopts.getopt.GetoptError, inst:
+ raise cmdutil.ParseError('imerge', '%s: %s' % (cmd, inst))
+ except TypeError:
+ raise cmdutil.ParseError('imerge', _('%s: invalid arguments') % cmd)
+
+def imerge(ui, repo, *args, **opts):
+ '''interactive merge
+
+ imerge lets you split a merge into pieces. When you start a merge
+ with imerge, the names of all files with conflicts are recorded.
+ You can then merge any of these files, and if the merge is
+ successful, they will be marked as resolved. When all files are
+ resolved, the merge is complete.
+
+ If no merge is in progress, hg imerge [rev] will merge the working
+ directory with rev (defaulting to the other head if the repository
+ only has two heads). You may also resume a saved merge with
+ hg imerge load <file>.
+
+ If a merge is in progress, hg imerge will default to merging the
+ next unresolved file.
+
+ The following subcommands are available:
+
+ status:
+ show the current state of the merge
+ next:
+ show the next unresolved file merge
+ merge [<file>]:
+ merge <file>. If the file merge is successful, the file will be
+ recorded as resolved. If no file is given, the next unresolved
+ file will be merged.
+ resolve <file>...:
+ mark files as successfully merged
+ unresolve <file>...:
+ mark files as requiring merging.
+ save <file>:
+ save the state of the merge to a file to be resumed elsewhere
+ load <file>:
+ load the state of the merge from a file created by save
+ '''
+
+ im = Imerge(ui, repo)
+
+ if im.merging():
+ im.resume()
+ else:
+ rev = opts.get('rev')
+ if rev and args:
+ raise util.Abort('please specify just one revision')
+
+ if len(args) == 2 and args[0] == 'load':
+ pass
+ else:
+ if args:
+ rev = args[0]
+ im.start(rev=rev)
+ args = ['status']
+
+ if not args:
+ args = ['merge']
+
+ return dispatch(im, args, opts)
+
+cmdtable = {
+ '^imerge':
+ (imerge,
+ [('r', 'rev', '', _('revision to merge'))], 'hg imerge [command]')
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/interhg.py Wed Aug 08 23:00:01 2007 +0200
@@ -0,0 +1,64 @@
+# interhg.py - interhg
+#
+# Copyright 2007 OHASHI Hideya <ohachige@gmail.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+#
+# The `interhg' Mercurial extension allows you to change changelog and
+# summary text just like InterWiki way.
+#
+# To enable this extension:
+#
+# [extensions]
+# interhg =
+#
+# This is an example to link to a bug tracking system.
+#
+# [interhg]
+# pat1 = s/issue(\d+)/ <a href="http:\/\/bts\/issue\1">issue\1<\/a> /
+#
+# You can add patterns to use pat2, pat3, ...
+# For example.
+#
+# pat2 = s/(^|\s)#(\d+)\b/ <b>#\2<\/b> /
+
+import re
+from mercurial.hgweb import hgweb_mod
+from mercurial import templater
+
+orig_escape = templater.common_filters["escape"]
+
+interhg_table = []
+
+def interhg_escape(x):
+ escstr = orig_escape(x)
+ for pat in interhg_table:
+ regexp = pat[0]
+ format = pat[1]
+ escstr = regexp.sub(format, escstr)
+ return escstr
+
+templater.common_filters["escape"] = interhg_escape
+
+orig_refresh = hgweb_mod.hgweb.refresh
+
+def interhg_refresh(self):
+ interhg_table[:] = []
+ num = 1
+ while True:
+ key = 'pat%d' % num
+ pat = self.config('interhg', key)
+ if pat == None:
+ break
+ pat = pat[2:-1]
+ span = re.search(r'[^\\]/', pat).span()
+ regexp = pat[:span[0] + 1]
+ format = pat[span[1]:]
+ format = re.sub(r'\\/', '/', format)
+ regexp = re.compile(regexp)
+ interhg_table.append((regexp, format))
+ num += 1
+ return orig_refresh(self)
+
+hgweb_mod.hgweb.refresh = interhg_refresh
--- a/hgext/mq.py Wed Aug 08 22:47:30 2007 +0200
+++ b/hgext/mq.py Wed Aug 08 23:00:01 2007 +0200
@@ -323,10 +323,10 @@
patch.diff(repo, node1, node2, fns, match=matchfn,
fp=fp, changes=changes, opts=self.diffopts())
- def mergeone(self, repo, mergeq, head, patch, rev, wlock):
+ def mergeone(self, repo, mergeq, head, patch, rev):
# first try just applying the patch
(err, n) = self.apply(repo, [ patch ], update_status=False,
- strict=True, merge=rev, wlock=wlock)
+ strict=True, merge=rev)
if err == 0:
return (err, n)
@@ -337,15 +337,14 @@
self.ui.warn("patch didn't work out, merging %s\n" % patch)
# apply failed, strip away that rev and merge.
- hg.clean(repo, head, wlock=wlock)
- self.strip(repo, n, update=False, backup='strip', wlock=wlock)
+ hg.clean(repo, head)
+ self.strip(repo, n, update=False, backup='strip')
ctx = repo.changectx(rev)
- ret = hg.merge(repo, rev, wlock=wlock)
+ ret = hg.merge(repo, rev)
if ret:
raise util.Abort(_("update returned %d") % ret)
- n = repo.commit(None, ctx.description(), ctx.user(),
- force=1, wlock=wlock)
+ n = repo.commit(None, ctx.description(), ctx.user(), force=1)
if n == None:
raise util.Abort(_("repo commit failed"))
try:
@@ -381,7 +380,7 @@
return pp[1]
return pp[0]
- def mergepatch(self, repo, mergeq, series, wlock):
+ def mergepatch(self, repo, mergeq, series):
if len(self.applied) == 0:
# each of the patches merged in will have two parents. This
# can confuse the qrefresh, qdiff, and strip code because it
@@ -390,8 +389,7 @@
# the first patch in the queue is never a merge patch
#
pname = ".hg.patches.merge.marker"
- n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
- wlock=wlock)
+ n = repo.commit(None, '[mq]: merge marker', user=None, force=1)
self.removeundo(repo)
self.applied.append(statusentry(revlog.hex(n), pname))
self.applied_dirty = 1
@@ -412,7 +410,7 @@
self.ui.warn("patch %s is not applied\n" % patch)
return (1, None)
rev = revlog.bin(info[1])
- (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
+ (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
if head:
self.applied.append(statusentry(revlog.hex(head), patch))
self.applied_dirty = 1
@@ -437,30 +435,30 @@
return (True, files, fuzz)
def apply(self, repo, series, list=False, update_status=True,
- strict=False, patchdir=None, merge=None, wlock=None,
- all_files={}):
- if not wlock:
- wlock = repo.wlock()
- lock = repo.lock()
- tr = repo.transaction()
+ strict=False, patchdir=None, merge=None, all_files={}):
+ wlock = lock = tr = None
try:
- ret = self._apply(tr, repo, series, list, update_status,
- strict, patchdir, merge, wlock,
- lock=lock, all_files=all_files)
- tr.close()
- self.save_dirty()
- return ret
- except:
+ wlock = repo.wlock()
+ lock = repo.lock()
+ tr = repo.transaction()
try:
- tr.abort()
- finally:
- repo.invalidate()
- repo.dirstate.invalidate()
- raise
+ ret = self._apply(repo, series, list, update_status,
+ strict, patchdir, merge, all_files=all_files)
+ tr.close()
+ self.save_dirty()
+ return ret
+ except:
+ try:
+ tr.abort()
+ finally:
+ repo.invalidate()
+ repo.dirstate.invalidate()
+ raise
+ finally:
+ del tr, lock, wlock
- def _apply(self, tr, repo, series, list=False, update_status=True,
- strict=False, patchdir=None, merge=None, wlock=None,
- lock=None, all_files={}):
+ def _apply(self, repo, series, list=False, update_status=True,
+ strict=False, patchdir=None, merge=None, all_files={}):
# TODO unify with commands.py
if not patchdir:
patchdir = self.path
@@ -497,17 +495,18 @@
removed = []
merged = []
for f in files:
- if os.path.exists(repo.dirstate.wjoin(f)):
+ if os.path.exists(repo.wjoin(f)):
merged.append(f)
else:
removed.append(f)
- repo.dirstate.update(repo.dirstate.filterfiles(removed), 'r')
- repo.dirstate.update(repo.dirstate.filterfiles(merged), 'm')
+ for f in removed:
+ repo.dirstate.remove(f)
+ for f in merged:
+ repo.dirstate.merge(f)
p1, p2 = repo.dirstate.parents()
repo.dirstate.setparents(p1, merge)
- files = patch.updatedir(self.ui, repo, files, wlock=wlock)
- n = repo.commit(files, message, user, date, force=1, lock=lock,
- wlock=wlock)
+ files = patch.updatedir(self.ui, repo, files)
+ n = repo.commit(files, message, user, date, force=1)
if n == None:
raise util.Abort(_("repo commit failed"))
@@ -614,44 +613,49 @@
commitfiles = m + a + r
self.check_toppatch(repo)
wlock = repo.wlock()
- insert = self.full_series_end()
- if msg:
- n = repo.commit(commitfiles, msg, force=True, wlock=wlock)
- else:
- n = repo.commit(commitfiles,
- "[mq]: %s" % patch, force=True, wlock=wlock)
- if n == None:
- raise util.Abort(_("repo commit failed"))
- self.full_series[insert:insert] = [patch]
- self.applied.append(statusentry(revlog.hex(n), patch))
- self.parse_series()
- self.series_dirty = 1
- self.applied_dirty = 1
- p = self.opener(patch, "w")
- if msg:
- msg = msg + "\n"
- p.write(msg)
- p.close()
- wlock = None
- r = self.qrepo()
- if r: r.add([patch])
- if commitfiles:
- self.refresh(repo, short=True)
- self.removeundo(repo)
+ try:
+ insert = self.full_series_end()
+ if msg:
+ n = repo.commit(commitfiles, msg, force=True)
+ else:
+ n = repo.commit(commitfiles, "[mq]: %s" % patch, force=True)
+ if n == None:
+ raise util.Abort(_("repo commit failed"))
+ self.full_series[insert:insert] = [patch]
+ self.applied.append(statusentry(revlog.hex(n), patch))
+ self.parse_series()
+ self.series_dirty = 1
+ self.applied_dirty = 1
+ p = self.opener(patch, "w")
+ if msg:
+ msg = msg + "\n"
+ p.write(msg)
+ p.close()
+ wlock = None
+ r = self.qrepo()
+ if r: r.add([patch])
+ if commitfiles:
+ self.refresh(repo, short=True, git=opts.get('git'))
+ self.removeundo(repo)
+ finally:
+ del wlock
- def strip(self, repo, rev, update=True, backup="all", wlock=None):
- if not wlock:
+ def strip(self, repo, rev, update=True, backup="all"):
+ wlock = lock = None
+ try:
wlock = repo.wlock()
- lock = repo.lock()
+ lock = repo.lock()
- if update:
- self.check_localchanges(repo, refresh=False)
- urev = self.qparents(repo, rev)
- hg.clean(repo, urev, wlock=wlock)
- repo.dirstate.write()
+ if update:
+ self.check_localchanges(repo, refresh=False)
+ urev = self.qparents(repo, rev)
+ hg.clean(repo, urev)
+ repo.dirstate.write()
- self.removeundo(repo)
- repair.strip(self.ui, repo, rev, backup)
+ self.removeundo(repo)
+ repair.strip(self.ui, repo, rev, backup)
+ finally:
+ del lock, wlock
def isapplied(self, patch):
"""returns (index, rev, patch)"""
@@ -735,157 +739,161 @@
raise util.Abort(_("patch %s not in series") % patch)
def push(self, repo, patch=None, force=False, list=False,
- mergeq=None, wlock=None):
- if not wlock:
- wlock = repo.wlock()
- patch = self.lookup(patch)
- # Suppose our series file is: A B C and the current 'top' patch is B.
- # qpush C should be performed (moving forward)
- # qpush B is a NOP (no change)
- # qpush A is an error (can't go backwards with qpush)
- if patch:
- info = self.isapplied(patch)
- if info:
- if info[0] < len(self.applied) - 1:
- raise util.Abort(_("cannot push to a previous patch: %s") %
- patch)
- if info[0] < len(self.series) - 1:
- self.ui.warn(_('qpush: %s is already at the top\n') % patch)
- else:
- self.ui.warn(_('all patches are currently applied\n'))
- return
+ mergeq=None):
+ wlock = repo.wlock()
+ try:
+ patch = self.lookup(patch)
+ # Suppose our series file is: A B C and the current 'top'
+ # patch is B. qpush C should be performed (moving forward)
+ # qpush B is a NOP (no change) qpush A is an error (can't
+ # go backwards with qpush)
+ if patch:
+ info = self.isapplied(patch)
+ if info:
+ if info[0] < len(self.applied) - 1:
+ raise util.Abort(
+ _("cannot push to a previous patch: %s") % patch)
+ if info[0] < len(self.series) - 1:
+ self.ui.warn(
+ _('qpush: %s is already at the top\n') % patch)
+ else:
+ self.ui.warn(_('all patches are currently applied\n'))
+ return
- # Following the above example, starting at 'top' of B:
- # qpush should be performed (pushes C), but a subsequent qpush without
- # an argument is an error (nothing to apply). This allows a loop
- # of "...while hg qpush..." to work as it detects an error when done
- if self.series_end() == len(self.series):
- self.ui.warn(_('patch series already fully applied\n'))
- return 1
- if not force:
- self.check_localchanges(repo)
+ # Following the above example, starting at 'top' of B:
+ # qpush should be performed (pushes C), but a subsequent
+ # qpush without an argument is an error (nothing to
+ # apply). This allows a loop of "...while hg qpush..." to
+ # work as it detects an error when done
+ if self.series_end() == len(self.series):
+ self.ui.warn(_('patch series already fully applied\n'))
+ return 1
+ if not force:
+ self.check_localchanges(repo)
- self.applied_dirty = 1;
- start = self.series_end()
- if start > 0:
- self.check_toppatch(repo)
- if not patch:
- patch = self.series[start]
- end = start + 1
- else:
- end = self.series.index(patch, start) + 1
- s = self.series[start:end]
- all_files = {}
- try:
- if mergeq:
- ret = self.mergepatch(repo, mergeq, s, wlock)
+ self.applied_dirty = 1;
+ start = self.series_end()
+ if start > 0:
+ self.check_toppatch(repo)
+ if not patch:
+ patch = self.series[start]
+ end = start + 1
else:
- ret = self.apply(repo, s, list, wlock=wlock,
- all_files=all_files)
- except:
- self.ui.warn(_('cleaning up working directory...'))
- node = repo.dirstate.parents()[0]
- hg.revert(repo, node, None, wlock)
- unknown = repo.status(wlock=wlock)[4]
- # only remove unknown files that we know we touched or
- # created while patching
- for f in unknown:
- if f in all_files:
- util.unlink(repo.wjoin(f))
- self.ui.warn(_('done\n'))
- raise
- top = self.applied[-1].name
- if ret[0]:
- self.ui.write("Errors during apply, please fix and refresh %s\n" %
- top)
- else:
- self.ui.write("Now at: %s\n" % top)
- return ret[0]
+ end = self.series.index(patch, start) + 1
+ s = self.series[start:end]
+ all_files = {}
+ try:
+ if mergeq:
+ ret = self.mergepatch(repo, mergeq, s)
+ else:
+ ret = self.apply(repo, s, list, all_files=all_files)
+ except:
+ self.ui.warn(_('cleaning up working directory...'))
+ node = repo.dirstate.parents()[0]
+ hg.revert(repo, node, None)
+ unknown = repo.status()[4]
+ # only remove unknown files that we know we touched or
+ # created while patching
+ for f in unknown:
+ if f in all_files:
+ util.unlink(repo.wjoin(f))
+ self.ui.warn(_('done\n'))
+ raise
+ top = self.applied[-1].name
+ if ret[0]:
+ self.ui.write(
+ "Errors during apply, please fix and refresh %s\n" % top)
+ else:
+ self.ui.write("Now at: %s\n" % top)
+ return ret[0]
+ finally:
+ del wlock
- def pop(self, repo, patch=None, force=False, update=True, all=False,
- wlock=None):
+ def pop(self, repo, patch=None, force=False, update=True, all=False):
def getfile(f, rev):
t = repo.file(f).read(rev)
repo.wfile(f, "w").write(t)
- if not wlock:
- wlock = repo.wlock()
- if patch:
- # index, rev, patch
- info = self.isapplied(patch)
- if not info:
- patch = self.lookup(patch)
- info = self.isapplied(patch)
- if not info:
- raise util.Abort(_("patch %s is not applied") % patch)
+ wlock = repo.wlock()
+ try:
+ if patch:
+ # index, rev, patch
+ info = self.isapplied(patch)
+ if not info:
+ patch = self.lookup(patch)
+ info = self.isapplied(patch)
+ if not info:
+ raise util.Abort(_("patch %s is not applied") % patch)
- if len(self.applied) == 0:
- # Allow qpop -a to work repeatedly,
- # but not qpop without an argument
- self.ui.warn(_("no patches applied\n"))
- return not all
+ if len(self.applied) == 0:
+ # Allow qpop -a to work repeatedly,
+ # but not qpop without an argument
+ self.ui.warn(_("no patches applied\n"))
+ return not all
- if not update:
- parents = repo.dirstate.parents()
- rr = [ revlog.bin(x.rev) for x in self.applied ]
- for p in parents:
- if p in rr:
- self.ui.warn("qpop: forcing dirstate update\n")
- update = True
+ if not update:
+ parents = repo.dirstate.parents()
+ rr = [ revlog.bin(x.rev) for x in self.applied ]
+ for p in parents:
+ if p in rr:
+ self.ui.warn("qpop: forcing dirstate update\n")
+ update = True
- if not force and update:
- self.check_localchanges(repo)
+ if not force and update:
+ self.check_localchanges(repo)
- self.applied_dirty = 1;
- end = len(self.applied)
- if not patch:
- if all:
- popi = 0
+ self.applied_dirty = 1;
+ end = len(self.applied)
+ if not patch:
+ if all:
+ popi = 0
+ else:
+ popi = len(self.applied) - 1
else:
- popi = len(self.applied) - 1
- else:
- popi = info[0] + 1
- if popi >= end:
- self.ui.warn("qpop: %s is already at the top\n" % patch)
- return
- info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
+ popi = info[0] + 1
+ if popi >= end:
+ self.ui.warn("qpop: %s is already at the top\n" % patch)
+ return
+ info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
- start = info[0]
- rev = revlog.bin(info[1])
+ start = info[0]
+ rev = revlog.bin(info[1])
- # we know there are no local changes, so we can make a simplified
- # form of hg.update.
- if update:
- top = self.check_toppatch(repo)
- qp = self.qparents(repo, rev)
- changes = repo.changelog.read(qp)
- mmap = repo.manifest.read(changes[0])
- m, a, r, d, u = repo.status(qp, top)[:5]
- if d:
- raise util.Abort("deletions found between repo revs")
- for f in m:
- getfile(f, mmap[f])
- for f in r:
- getfile(f, mmap[f])
- util.set_exec(repo.wjoin(f), mmap.execf(f))
- repo.dirstate.update(m + r, 'n')
- for f in a:
- try:
- os.unlink(repo.wjoin(f))
- except OSError, e:
- if e.errno != errno.ENOENT:
- raise
- try: os.removedirs(os.path.dirname(repo.wjoin(f)))
- except: pass
- if a:
- repo.dirstate.forget(a)
- repo.dirstate.setparents(qp, revlog.nullid)
- self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
- del self.applied[start:end]
- if len(self.applied):
- self.ui.write("Now at: %s\n" % self.applied[-1].name)
- else:
- self.ui.write("Patch queue now empty\n")
+ # we know there are no local changes, so we can make a simplified
+ # form of hg.update.
+ if update:
+ top = self.check_toppatch(repo)
+ qp = self.qparents(repo, rev)
+ changes = repo.changelog.read(qp)
+ mmap = repo.manifest.read(changes[0])
+ m, a, r, d, u = repo.status(qp, top)[:5]
+ if d:
+ raise util.Abort("deletions found between repo revs")
+ for f in m:
+ getfile(f, mmap[f])
+ for f in r:
+ getfile(f, mmap[f])
+ util.set_exec(repo.wjoin(f), mmap.execf(f))
+ for f in m + r:
+ repo.dirstate.normal(f)
+ for f in a:
+ try:
+ os.unlink(repo.wjoin(f))
+ except OSError, e:
+ if e.errno != errno.ENOENT:
+ raise
+ try: os.removedirs(os.path.dirname(repo.wjoin(f)))
+ except: pass
+ repo.dirstate.forget(f)
+ repo.dirstate.setparents(qp, revlog.nullid)
+ self.strip(repo, rev, update=False, backup='strip')
+ del self.applied[start:end]
+ if len(self.applied):
+ self.ui.write("Now at: %s\n" % self.applied[-1].name)
+ else:
+ self.ui.write("Patch queue now empty\n")
+ finally:
+ del wlock
def diff(self, repo, pats, opts):
top = self.check_toppatch(repo)
@@ -902,175 +910,184 @@
self.ui.write("No patches applied\n")
return 1
wlock = repo.wlock()
- self.check_toppatch(repo)
- (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
- top = revlog.bin(top)
- cparents = repo.changelog.parents(top)
- patchparent = self.qparents(repo, top)
- message, comments, user, date, patchfound = self.readheaders(patchfn)
-
- patchf = self.opener(patchfn, 'r+')
-
- # if the patch was a git patch, refresh it as a git patch
- for line in patchf:
- if line.startswith('diff --git'):
- self.diffopts().git = True
- break
- patchf.seek(0)
- patchf.truncate()
+ try:
+ self.check_toppatch(repo)
+ (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
+ top = revlog.bin(top)
+ cparents = repo.changelog.parents(top)
+ patchparent = self.qparents(repo, top)
+ message, comments, user, date, patchfound = self.readheaders(patchfn)
- msg = opts.get('msg', '').rstrip()
- if msg:
- if comments:
- # Remove existing message.
- ci = 0
- subj = None
- for mi in xrange(len(message)):
- if comments[ci].lower().startswith('subject: '):
- subj = comments[ci][9:]
- while message[mi] != comments[ci] and message[mi] != subj:
- ci += 1
- del comments[ci]
- comments.append(msg)
- if comments:
- comments = "\n".join(comments) + '\n\n'
- patchf.write(comments)
+ patchf = self.opener(patchfn, 'r+')
+
+ # if the patch was a git patch, refresh it as a git patch
+ for line in patchf:
+ if line.startswith('diff --git'):
+ self.diffopts().git = True
+ break
+ patchf.seek(0)
+ patchf.truncate()
- if opts.get('git'):
- self.diffopts().git = True
- fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
- tip = repo.changelog.tip()
- if top == tip:
- # if the top of our patch queue is also the tip, there is an
- # optimization here. We update the dirstate in place and strip
- # off the tip commit. Then just commit the current directory
- # tree. We can also send repo.commit the list of files
- # changed to speed up the diff
- #
- # in short mode, we only diff the files included in the
- # patch already
- #
- # this should really read:
- # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
- # but we do it backwards to take advantage of manifest/chlog
- # caching against the next repo.status call
- #
- mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
- changes = repo.changelog.read(tip)
- man = repo.manifest.read(changes[0])
- aaa = aa[:]
- if opts.get('short'):
- filelist = mm + aa + dd
- match = dict.fromkeys(filelist).__contains__
- else:
- filelist = None
- match = util.always
- m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
+ msg = opts.get('msg', '').rstrip()
+ if msg:
+ if comments:
+ # Remove existing message.
+ ci = 0
+ subj = None
+ for mi in xrange(len(message)):
+ if comments[ci].lower().startswith('subject: '):
+ subj = comments[ci][9:]
+ while message[mi] != comments[ci] and message[mi] != subj:
+ ci += 1
+ del comments[ci]
+ comments.append(msg)
+ if comments:
+ comments = "\n".join(comments) + '\n\n'
+ patchf.write(comments)
- # we might end up with files that were added between tip and
- # the dirstate parent, but then changed in the local dirstate.
- # in this case, we want them to only show up in the added section
- for x in m:
- if x not in aa:
- mm.append(x)
- # we might end up with files added by the local dirstate that
- # were deleted by the patch. In this case, they should only
- # show up in the changed section.
- for x in a:
- if x in dd:
- del dd[dd.index(x)]
- mm.append(x)
+ if opts.get('git'):
+ self.diffopts().git = True
+ fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
+ tip = repo.changelog.tip()
+ if top == tip:
+ # if the top of our patch queue is also the tip, there is an
+ # optimization here. We update the dirstate in place and strip
+ # off the tip commit. Then just commit the current directory
+ # tree. We can also send repo.commit the list of files
+ # changed to speed up the diff
+ #
+ # in short mode, we only diff the files included in the
+ # patch already
+ #
+ # this should really read:
+ # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
+ # but we do it backwards to take advantage of manifest/chlog
+ # caching against the next repo.status call
+ #
+ mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
+ changes = repo.changelog.read(tip)
+ man = repo.manifest.read(changes[0])
+ aaa = aa[:]
+ if opts.get('short'):
+ filelist = mm + aa + dd
+ match = dict.fromkeys(filelist).__contains__
else:
- aa.append(x)
- # make sure any files deleted in the local dirstate
- # are not in the add or change column of the patch
- forget = []
- for x in d + r:
- if x in aa:
- del aa[aa.index(x)]
- forget.append(x)
- continue
- elif x in mm:
- del mm[mm.index(x)]
- dd.append(x)
+ filelist = None
+ match = util.always
+ m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
- m = util.unique(mm)
- r = util.unique(dd)
- a = util.unique(aa)
- c = [filter(matchfn, l) for l in (m, a, r, [], u)]
- filelist = util.unique(c[0] + c[1] + c[2])
- patch.diff(repo, patchparent, files=filelist, match=matchfn,
- fp=patchf, changes=c, opts=self.diffopts())
- patchf.close()
+ # we might end up with files that were added between
+ # tip and the dirstate parent, but then changed in the
+ # local dirstate. in this case, we want them to only
+ # show up in the added section
+ for x in m:
+ if x not in aa:
+ mm.append(x)
+ # we might end up with files added by the local dirstate that
+ # were deleted by the patch. In this case, they should only
+ # show up in the changed section.
+ for x in a:
+ if x in dd:
+ del dd[dd.index(x)]
+ mm.append(x)
+ else:
+ aa.append(x)
+ # make sure any files deleted in the local dirstate
+ # are not in the add or change column of the patch
+ forget = []
+ for x in d + r:
+ if x in aa:
+ del aa[aa.index(x)]
+ forget.append(x)
+ continue
+ elif x in mm:
+ del mm[mm.index(x)]
+ dd.append(x)
+
+ m = util.unique(mm)
+ r = util.unique(dd)
+ a = util.unique(aa)
+ c = [filter(matchfn, l) for l in (m, a, r, [], u)]
+ filelist = util.unique(c[0] + c[1] + c[2])
+ patch.diff(repo, patchparent, files=filelist, match=matchfn,
+ fp=patchf, changes=c, opts=self.diffopts())
+ patchf.close()
- repo.dirstate.setparents(*cparents)
- copies = {}
- for dst in a:
- src = repo.dirstate.copied(dst)
- if src is None:
- continue
- copies.setdefault(src, []).append(dst)
- repo.dirstate.update(a, 'a')
- # remember the copies between patchparent and tip
- # this may be slow, so don't do it if we're not tracking copies
- if self.diffopts().git:
- for dst in aaa:
- f = repo.file(dst)
- src = f.renamed(man[dst])
- if src:
- copies[src[0]] = copies.get(dst, [])
- if dst in a:
- copies[src[0]].append(dst)
- # we can't copy a file created by the patch itself
- if dst in copies:
- del copies[dst]
- for src, dsts in copies.iteritems():
- for dst in dsts:
- repo.dirstate.copy(src, dst)
- repo.dirstate.update(r, 'r')
- # if the patch excludes a modified file, mark that file with mtime=0
- # so status can see it.
- mm = []
- for i in xrange(len(m)-1, -1, -1):
- if not matchfn(m[i]):
- mm.append(m[i])
- del m[i]
- repo.dirstate.update(m, 'n')
- repo.dirstate.update(mm, 'n', st_mtime=-1, st_size=-1)
- repo.dirstate.forget(forget)
+ repo.dirstate.setparents(*cparents)
+ copies = {}
+ for dst in a:
+ src = repo.dirstate.copied(dst)
+ if src is None:
+ continue
+ copies.setdefault(src, []).append(dst)
+ repo.dirstate.add(dst)
+ # remember the copies between patchparent and tip
+ # this may be slow, so don't do it if we're not tracking copies
+ if self.diffopts().git:
+ for dst in aaa:
+ f = repo.file(dst)
+ src = f.renamed(man[dst])
+ if src:
+ copies[src[0]] = copies.get(dst, [])
+ if dst in a:
+ copies[src[0]].append(dst)
+ # we can't copy a file created by the patch itself
+ if dst in copies:
+ del copies[dst]
+ for src, dsts in copies.iteritems():
+ for dst in dsts:
+ repo.dirstate.copy(src, dst)
+ for f in r:
+ repo.dirstate.remove(f)
+ # if the patch excludes a modified file, mark that
+ # file with mtime=0 so status can see it.
+ mm = []
+ for i in xrange(len(m)-1, -1, -1):
+ if not matchfn(m[i]):
+ mm.append(m[i])
+ del m[i]
+ for f in m:
+ repo.dirstate.normal(f)
+ for f in mm:
+ repo.dirstate.normaldirty(f)
+ for f in forget:
+ repo.dirstate.forget(f)
- if not msg:
- if not message:
- message = "[mq]: %s\n" % patchfn
+ if not msg:
+ if not message:
+ message = "[mq]: %s\n" % patchfn
+ else:
+ message = "\n".join(message)
else:
- message = "\n".join(message)
- else:
- message = msg
+ message = msg
- self.strip(repo, top, update=False, backup='strip', wlock=wlock)
- n = repo.commit(filelist, message, changes[1], match=matchfn,
- force=1, wlock=wlock)
- self.applied[-1] = statusentry(revlog.hex(n), patchfn)
- self.applied_dirty = 1
- self.removeundo(repo)
- else:
- self.printdiff(repo, patchparent, fp=patchf)
- patchf.close()
- added = repo.status()[1]
- for a in added:
- f = repo.wjoin(a)
- try:
- os.unlink(f)
- except OSError, e:
- if e.errno != errno.ENOENT:
- raise
- try: os.removedirs(os.path.dirname(f))
- except: pass
- # forget the file copies in the dirstate
- # push should readd the files later on
- repo.dirstate.forget(added)
- self.pop(repo, force=True, wlock=wlock)
- self.push(repo, force=True, wlock=wlock)
+ self.strip(repo, top, update=False,
+ backup='strip')
+ n = repo.commit(filelist, message, changes[1], match=matchfn,
+ force=1)
+ self.applied[-1] = statusentry(revlog.hex(n), patchfn)
+ self.applied_dirty = 1
+ self.removeundo(repo)
+ else:
+ self.printdiff(repo, patchparent, fp=patchf)
+ patchf.close()
+ added = repo.status()[1]
+ for a in added:
+ f = repo.wjoin(a)
+ try:
+ os.unlink(f)
+ except OSError, e:
+ if e.errno != errno.ENOENT:
+ raise
+ try: os.removedirs(os.path.dirname(f))
+ except: pass
+ # forget the file copies in the dirstate
+ # push should readd the files later on
+ repo.dirstate.forget(a)
+ self.pop(repo, force=True)
+ self.push(repo, force=True)
+ finally:
+ del wlock
def init(self, repo, create=False):
if not create and os.path.isdir(self.path):
@@ -1487,11 +1504,20 @@
Source patch repository is looked for in <src>/.hg/patches by
default. Use -p <url> to change.
+
+ The patch directory must be a nested mercurial repository, as
+ would be created by qinit -c.
'''
cmdutil.setremoteconfig(ui, opts)
if dest is None:
dest = hg.defaultdest(source)
sr = hg.repository(ui, ui.expandpath(source))
+ patchdir = opts['patches'] or (sr.url() + '/.hg/patches')
+ try:
+ pr = hg.repository(ui, patchdir)
+ except hg.RepoError:
+ raise util.Abort(_('versioned patch repository not found'
+ ' (see qinit -c)'))
qbase, destrev = None, None
if sr.local():
if sr.mq.applied:
@@ -1857,10 +1883,13 @@
r = q.qrepo()
if r:
wlock = r.wlock()
- if r.dirstate.state(name) == 'r':
- r.undelete([name], wlock)
- r.copy(patch, name, wlock)
- r.remove([patch], False, wlock)
+ try:
+ if r.dirstate[name] == 'r':
+ r.undelete([name])
+ r.copy(patch, name)
+ r.remove([patch], False)
+ finally:
+ del wlock
q.save_dirty()
@@ -2102,10 +2131,8 @@
('U', 'noupdate', None, _('do not update the new working directories')),
('', 'uncompressed', None,
_('use uncompressed transfer (fast over LAN)')),
- ('e', 'ssh', '', _('specify ssh command to use')),
('p', 'patches', '', _('location of source patch repo')),
- ('', 'remotecmd', '',
- _('specify hg command to run on the remote side'))],
+ ] + commands.remoteopts,
_('hg qclone [OPTION]... SOURCE [DEST]')),
"qcommit|qci":
(commit,
@@ -2114,8 +2141,7 @@
"^qdiff":
(diff,
[('g', 'git', None, _('use git extended diff format')),
- ('I', 'include', [], _('include names matching the given patterns')),
- ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ ] + commands.walkopts,
_('hg qdiff [-I] [-X] [-g] [FILE]...')),
"qdelete|qremove|qrm":
(delete,
@@ -2154,9 +2180,8 @@
(new,
[('e', 'edit', None, _('edit commit message')),
('f', 'force', None, _('import uncommitted changes into patch')),
- ('I', 'include', [], _('include names matching the given patterns')),
- ('X', 'exclude', [], _('exclude names matching the given patterns')),
- ] + commands.commitopts,
+ ('g', 'git', None, _('use git extended diff format')),
+ ] + commands.walkopts + commands.commitopts,
_('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
"qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
"qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
@@ -2179,9 +2204,7 @@
[('e', 'edit', None, _('edit commit message')),
('g', 'git', None, _('use git extended diff format')),
('s', 'short', None, _('refresh only files already in the patch')),
- ('I', 'include', [], _('include names matching the given patterns')),
- ('X', 'exclude', [], _('exclude names matching the given patterns')),
- ] + commands.commitopts,
+ ] + commands.walkopts + commands.commitopts,
_('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
'qrename|qmv':
(rename, [], _('hg qrename PATCH1 [PATCH2]')),
--- a/hgext/patchbomb.py Wed Aug 08 22:47:30 2007 +0200
+++ b/hgext/patchbomb.py Wed Aug 08 23:00:01 2007 +0200
@@ -306,8 +306,12 @@
d = cdiffstat(_('Final summary:\n'), jumbo)
if d: body = '\n' + d
- ui.write(_('\nWrite the introductory message for the patch series.\n\n'))
- body = ui.edit(body, sender)
+ if opts['desc']:
+ body = open(opts['desc']).read()
+ else:
+ ui.write(_('\nWrite the introductory message for the '
+ 'patch series.\n\n'))
+ body = ui.edit(body, sender)
msg = email.MIMEText.MIMEText(body)
msg['Subject'] = subj
@@ -417,6 +421,7 @@
('c', 'cc', [], _('email addresses of copy recipients')),
('d', 'diffstat', None, _('add diffstat output to messages')),
('', 'date', '', _('use the given date as the sending date')),
+ ('', 'desc', '', _('use the given file as the series description')),
('g', 'git', None, _('use git extended diff format')),
('f', 'from', '', _('email address of sender')),
('', 'plain', None, _('omit hg patch header')),
--- a/hgext/purge.py Wed Aug 08 22:47:30 2007 +0200
+++ b/hgext/purge.py Wed Aug 08 23:00:01 2007 +0200
@@ -27,7 +27,7 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-from mercurial import hg, util
+from mercurial import hg, util, commands
from mercurial.i18n import _
import os
@@ -162,7 +162,6 @@
('p', 'print', None, _('print the file names instead of deleting them')),
('0', 'print0', None, _('end filenames with NUL, for use with xargs'
' (implies -p)')),
- ('I', 'include', [], _('include names matching the given patterns')),
- ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ ] + commands.walkopts,
_('hg purge [OPTION]... [DIR]...'))
}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/record.py Wed Aug 08 23:00:01 2007 +0200
@@ -0,0 +1,380 @@
+# record.py
+#
+# Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
+#
+# This software may be used and distributed according to the terms of
+# the GNU General Public License, incorporated herein by reference.
+
+'''interactive change selection during commit'''
+
+from mercurial.i18n import _
+from mercurial import cmdutil, commands, cmdutil, hg, mdiff, patch, revlog
+from mercurial import util
+import copy, cStringIO, errno, operator, os, re, shutil, tempfile
+
+lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
+
+def scanpatch(fp):
+ lr = patch.linereader(fp)
+
+ def scanwhile(first, p):
+ lines = [first]
+ while True:
+ line = lr.readline()
+ if not line:
+ break
+ if p(line):
+ lines.append(line)
+ else:
+ lr.push(line)
+ break
+ return lines
+
+ while True:
+ line = lr.readline()
+ if not line:
+ break
+ if line.startswith('diff --git a/'):
+ def notheader(line):
+ s = line.split(None, 1)
+ return not s or s[0] not in ('---', 'diff')
+ header = scanwhile(line, notheader)
+ fromfile = lr.readline()
+ if fromfile.startswith('---'):
+ tofile = lr.readline()
+ header += [fromfile, tofile]
+ else:
+ lr.push(fromfile)
+ yield 'file', header
+ elif line[0] == ' ':
+ yield 'context', scanwhile(line, lambda l: l[0] in ' \\')
+ elif line[0] in '-+':
+ yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\')
+ else:
+ m = lines_re.match(line)
+ if m:
+ yield 'range', m.groups()
+ else:
+ raise patch.PatchError('unknown patch content: %r' % line)
+
+class header(object):
+ diff_re = re.compile('diff --git a/(.*) b/(.*)$')
+ allhunks_re = re.compile('(?:index|new file|deleted file) ')
+ pretty_re = re.compile('(?:new file|deleted file) ')
+ special_re = re.compile('(?:index|new|deleted|copy|rename) ')
+
+ def __init__(self, header):
+ self.header = header
+ self.hunks = []
+
+ def binary(self):
+ for h in self.header:
+ if h.startswith('index '):
+ return True
+
+ def pretty(self, fp):
+ for h in self.header:
+ if h.startswith('index '):
+ fp.write(_('this modifies a binary file (all or nothing)\n'))
+ break
+ if self.pretty_re.match(h):
+ fp.write(h)
+ if self.binary():
+ fp.write(_('this is a binary file\n'))
+ break
+ if h.startswith('---'):
+ fp.write(_('%d hunks, %d lines changed\n') %
+ (len(self.hunks),
+ sum([h.added + h.removed for h in self.hunks])))
+ break
+ fp.write(h)
+
+ def write(self, fp):
+ fp.write(''.join(self.header))
+
+ def allhunks(self):
+ for h in self.header:
+ if self.allhunks_re.match(h):
+ return True
+
+ def files(self):
+ fromfile, tofile = self.diff_re.match(self.header[0]).groups()
+ if fromfile == tofile:
+ return [fromfile]
+ return [fromfile, tofile]
+
+ def filename(self):
+ return self.files()[-1]
+
+ def __repr__(self):
+ return '<header %s>' % (' '.join(map(repr, self.files())))
+
+ def special(self):
+ for h in self.header:
+ if self.special_re.match(h):
+ return True
+
+def countchanges(hunk):
+ add = len([h for h in hunk if h[0] == '+'])
+ rem = len([h for h in hunk if h[0] == '-'])
+ return add, rem
+
+class hunk(object):
+ maxcontext = 3
+
+ def __init__(self, header, fromline, toline, proc, before, hunk, after):
+ def trimcontext(number, lines):
+ delta = len(lines) - self.maxcontext
+ if False and delta > 0:
+ return number + delta, lines[:self.maxcontext]
+ return number, lines
+
+ self.header = header
+ self.fromline, self.before = trimcontext(fromline, before)
+ self.toline, self.after = trimcontext(toline, after)
+ self.proc = proc
+ self.hunk = hunk
+ self.added, self.removed = countchanges(self.hunk)
+
+ def write(self, fp):
+ delta = len(self.before) + len(self.after)
+ fromlen = delta + self.removed
+ tolen = delta + self.added
+ fp.write('@@ -%d,%d +%d,%d @@%s\n' %
+ (self.fromline, fromlen, self.toline, tolen,
+ self.proc and (' ' + self.proc)))
+ fp.write(''.join(self.before + self.hunk + self.after))
+
+ pretty = write
+
+ def filename(self):
+ return self.header.filename()
+
+ def __repr__(self):
+ return '<hunk %r@%d>' % (self.filename(), self.fromline)
+
+def parsepatch(fp):
+ class parser(object):
+ def __init__(self):
+ self.fromline = 0
+ self.toline = 0
+ self.proc = ''
+ self.header = None
+ self.context = []
+ self.before = []
+ self.hunk = []
+ self.stream = []
+
+ def addrange(self, (fromstart, fromend, tostart, toend, proc)):
+ self.fromline = int(fromstart)
+ self.toline = int(tostart)
+ self.proc = proc
+
+ def addcontext(self, context):
+ if self.hunk:
+ h = hunk(self.header, self.fromline, self.toline, self.proc,
+ self.before, self.hunk, context)
+ self.header.hunks.append(h)
+ self.stream.append(h)
+ self.fromline += len(self.before) + h.removed
+ self.toline += len(self.before) + h.added
+ self.before = []
+ self.hunk = []
+ self.proc = ''
+ self.context = context
+
+ def addhunk(self, hunk):
+ if self.context:
+ self.before = self.context
+ self.context = []
+ self.hunk = data
+
+ def newfile(self, hdr):
+ self.addcontext([])
+ h = header(hdr)
+ self.stream.append(h)
+ self.header = h
+
+ def finished(self):
+ self.addcontext([])
+ return self.stream
+
+ transitions = {
+ 'file': {'context': addcontext,
+ 'file': newfile,
+ 'hunk': addhunk,
+ 'range': addrange},
+ 'context': {'file': newfile,
+ 'hunk': addhunk,
+ 'range': addrange},
+ 'hunk': {'context': addcontext,
+ 'file': newfile,
+ 'range': addrange},
+ 'range': {'context': addcontext,
+ 'hunk': addhunk},
+ }
+
+ p = parser()
+
+ state = 'context'
+ for newstate, data in scanpatch(fp):
+ try:
+ p.transitions[state][newstate](p, data)
+ except KeyError:
+ raise patch.PatchError('unhandled transition: %s -> %s' %
+ (state, newstate))
+ state = newstate
+ return p.finished()
+
+def filterpatch(ui, chunks):
+ chunks = list(chunks)
+ chunks.reverse()
+ seen = {}
+ def consumefile():
+ consumed = []
+ while chunks:
+ if isinstance(chunks[-1], header):
+ break
+ else:
+ consumed.append(chunks.pop())
+ return consumed
+ resp = None
+ applied = {}
+ while chunks:
+ chunk = chunks.pop()
+ if isinstance(chunk, header):
+ fixoffset = 0
+ hdr = ''.join(chunk.header)
+ if hdr in seen:
+ consumefile()
+ continue
+ seen[hdr] = True
+ if not resp:
+ chunk.pretty(ui)
+ r = resp or ui.prompt(_('record changes to %s? [y]es [n]o') %
+ _(' and ').join(map(repr, chunk.files())),
+ '(?:|[yYnNqQaA])$') or 'y'
+ if r in 'aA':
+ r = 'y'
+ resp = 'y'
+ if r in 'qQ':
+ raise util.Abort(_('user quit'))
+ if r in 'yY':
+ applied[chunk.filename()] = [chunk]
+ if chunk.allhunks():
+ applied[chunk.filename()] += consumefile()
+ else:
+ consumefile()
+ else:
+ if not resp:
+ chunk.pretty(ui)
+ r = resp or ui.prompt(_('record this change to %r? [y]es [n]o') %
+ chunk.filename(), '(?:|[yYnNqQaA])$') or 'y'
+ if r in 'aA':
+ r = 'y'
+ resp = 'y'
+ if r in 'qQ':
+ raise util.Abort(_('user quit'))
+ if r in 'yY':
+ if fixoffset:
+ chunk = copy.copy(chunk)
+ chunk.toline += fixoffset
+ applied[chunk.filename()].append(chunk)
+ else:
+ fixoffset += chunk.removed - chunk.added
+ return reduce(operator.add, [h for h in applied.itervalues()
+ if h[0].special() or len(h) > 1], [])
+
+def record(ui, repo, *pats, **opts):
+ '''interactively select changes to commit'''
+
+ if not ui.interactive:
+ raise util.Abort(_('running non-interactively, use commit instead'))
+
+ def recordfunc(ui, repo, files, message, match, opts):
+ if files:
+ changes = None
+ else:
+ changes = repo.status(files=files, match=match)[:5]
+ modified, added, removed = changes[:3]
+ files = modified + added + removed
+ diffopts = mdiff.diffopts(git=True, nodates=True)
+ fp = cStringIO.StringIO()
+ patch.diff(repo, repo.dirstate.parents()[0], files=files,
+ match=match, changes=changes, opts=diffopts, fp=fp)
+ fp.seek(0)
+
+ chunks = filterpatch(ui, parsepatch(fp))
+ del fp
+
+ contenders = {}
+ for h in chunks:
+ try: contenders.update(dict.fromkeys(h.files()))
+ except AttributeError: pass
+
+ newfiles = [f for f in files if f in contenders]
+
+ if not newfiles:
+ ui.status(_('no changes to record\n'))
+ return 0
+
+ if changes is None:
+ changes = repo.status(files=newfiles, match=match)[:5]
+ modified = dict.fromkeys(changes[0])
+
+ backups = {}
+ backupdir = repo.join('record-backups')
+ try:
+ os.mkdir(backupdir)
+ except OSError, err:
+ if err.errno != errno.EEXIST:
+ raise
+ try:
+ for f in newfiles:
+ if f not in modified:
+ continue
+ fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
+ dir=backupdir)
+ os.close(fd)
+ ui.debug('backup %r as %r\n' % (f, tmpname))
+ util.copyfile(repo.wjoin(f), tmpname)
+ backups[f] = tmpname
+
+ fp = cStringIO.StringIO()
+ for c in chunks:
+ if c.filename() in backups:
+ c.write(fp)
+ dopatch = fp.tell()
+ fp.seek(0)
+
+ if backups:
+ hg.revert(repo, repo.dirstate.parents()[0], backups.has_key)
+
+ if dopatch:
+ ui.debug('applying patch\n')
+ ui.debug(fp.getvalue())
+ patch.internalpatch(fp, ui, 1, repo.root)
+ del fp
+
+ repo.commit(newfiles, message, opts['user'], opts['date'], match,
+ force_editor=opts.get('force_editor'))
+ return 0
+ finally:
+ try:
+ for realname, tmpname in backups.iteritems():
+ ui.debug('restoring %r to %r\n' % (tmpname, realname))
+ util.copyfile(tmpname, repo.wjoin(realname))
+ os.unlink(tmpname)
+ os.rmdir(backupdir)
+ except OSError:
+ pass
+ return cmdutil.commit(ui, repo, recordfunc, pats, opts)
+
+cmdtable = {
+ "record":
+ (record,
+ [('A', 'addremove', None,
+ _('mark new/missing files as added/removed before committing')),
+ ] + commands.walkopts + commands.commitopts + commands.commitopts2,
+ _('hg record [OPTION]... [FILE]...')),
+}
--- a/hgext/transplant.py Wed Aug 08 22:47:30 2007 +0200
+++ b/hgext/transplant.py Wed Aug 08 23:00:01 2007 +0200
@@ -96,9 +96,10 @@
diffopts = patch.diffopts(self.ui, opts)
diffopts.git = True
- wlock = repo.wlock()
- lock = repo.lock()
+ lock = wlock = None
try:
+ wlock = repo.wlock()
+ lock = repo.lock()
for rev in revs:
node = revmap[rev]
revstr = '%s:%s' % (rev, revlog.short(node))
@@ -118,9 +119,8 @@
continue
if pulls:
if source != repo:
- repo.pull(source, heads=pulls, lock=lock)
- merge.update(repo, pulls[-1], False, False, None,
- wlock=wlock)
+ repo.pull(source, heads=pulls)
+ merge.update(repo, pulls[-1], False, False, None)
p1, p2 = repo.dirstate.parents()
pulls = []
@@ -131,7 +131,7 @@
# fail.
domerge = True
if not hasnode(repo, node):
- repo.pull(source, heads=[node], lock=lock)
+ repo.pull(source, heads=[node])
if parents[1] != revlog.nullid:
self.ui.note(_('skipping merge changeset %s:%s\n')
@@ -146,11 +146,11 @@
del revmap[rev]
if patchfile or domerge:
try:
- n = self.applyone(repo, node, source.changelog.read(node),
+ n = self.applyone(repo, node,
+ source.changelog.read(node),
patchfile, merge=domerge,
log=opts.get('log'),
- filter=opts.get('filter'),
- lock=lock, wlock=wlock)
+ filter=opts.get('filter'))
if n and domerge:
self.ui.status(_('%s merged at %s\n') % (revstr,
revlog.short(n)))
@@ -161,11 +161,12 @@
if patchfile:
os.unlink(patchfile)
if pulls:
- repo.pull(source, heads=pulls, lock=lock)
- merge.update(repo, pulls[-1], False, False, None, wlock=wlock)
+ repo.pull(source, heads=pulls)
+ merge.update(repo, pulls[-1], False, False, None)
finally:
self.saveseries(revmap, merges)
self.transplants.write()
+ del lock, wlock
def filter(self, filter, changelog, patchfile):
'''arbitrarily rewrite changeset before applying it'''
@@ -193,7 +194,7 @@
return (user, date, msg)
def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
- filter=None, lock=None, wlock=None):
+ filter=None):
'''apply the patch in patchfile to the repository as a transplant'''
(manifest, user, (time, timezone), files, message) = cl[:5]
date = "%d %d" % (time, timezone)
@@ -219,7 +220,7 @@
self.ui.warn(_('%s: empty changeset') % revlog.hex(node))
return None
finally:
- files = patch.updatedir(self.ui, repo, files, wlock=wlock)
+ files = patch.updatedir(self.ui, repo, files)
except Exception, inst:
if filter:
os.unlink(patchfile)
@@ -237,8 +238,7 @@
p1, p2 = repo.dirstate.parents()
repo.dirstate.setparents(p1, node)
- n = repo.commit(files, message, user, date, lock=lock, wlock=wlock,
- extra=extra)
+ n = repo.commit(files, message, user, date, extra=extra)
if not merge:
self.transplants.set(n, node)
@@ -272,20 +272,24 @@
extra = {'transplant_source': node}
wlock = repo.wlock()
- p1, p2 = repo.dirstate.parents()
- if p1 != parents[0]:
- raise util.Abort(_('working dir not at transplant parent %s') %
- revlog.hex(parents[0]))
- if merge:
- repo.dirstate.setparents(p1, parents[1])
- n = repo.commit(None, message, user, date, wlock=wlock, extra=extra)
- if not n:
- raise util.Abort(_('commit failed'))
- if not merge:
- self.transplants.set(n, node)
- self.unlog()
+ try:
+ p1, p2 = repo.dirstate.parents()
+ if p1 != parents[0]:
+ raise util.Abort(
+ _('working dir not at transplant parent %s') %
+ revlog.hex(parents[0]))
+ if merge:
+ repo.dirstate.setparents(p1, parents[1])
+ n = repo.commit(None, message, user, date, extra=extra)
+ if not n:
+ raise util.Abort(_('commit failed'))
+ if not merge:
+ self.transplants.set(n, node)
+ self.unlog()
- return n, node
+ return n, node
+ finally:
+ del wlock
def readseries(self):
nodes = []
--- a/hgext/win32text.py Wed Aug 08 22:47:30 2007 +0200
+++ b/hgext/win32text.py Wed Aug 08 23:00:01 2007 +0200
@@ -1,7 +1,24 @@
-import mercurial.util
+from mercurial import util, ui
+from mercurial.i18n import gettext as _
+import re
+
+# regexp for single LF without CR preceding.
+re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
def dumbdecode(s, cmd):
- return s.replace('\n', '\r\n')
+ # warn if already has CRLF in repository.
+ # it might cause unexpected eol conversion.
+ # see issue 302:
+ # http://www.selenic.com/mercurial/bts/issue302
+ if '\r\n' in s:
+ u = ui.ui()
+ u.warn(_('WARNING: file in repository already has CRLF line ending \n'
+ ' which does not need eol conversion by win32text plugin.\n'
+ ' Please reconsider encode/decode setting in'
+ ' mercurial.ini or .hg/hgrc\n'
+ ' before next commit.\n'))
+ # replace single LF to CRLF
+ return re_single_lf.sub('\\1\r\n', s)
def dumbencode(s, cmd):
return s.replace('\r\n', '\n')
@@ -20,7 +37,7 @@
return dumbencode(s, cmd)
return s
-mercurial.util.filtertable.update({
+util.filtertable.update({
'dumbdecode:': dumbdecode,
'dumbencode:': dumbencode,
'cleverdecode:': cleverdecode,
--- a/hgmerge Wed Aug 08 22:47:30 2007 +0200
+++ b/hgmerge Wed Aug 08 23:00:01 2007 +0200
@@ -96,6 +96,20 @@
done
}
+# Check if conflict markers are present and ask if the merge was successful
+conflicts_or_success() {
+ while egrep '^(<<<<<<< .*|=======|>>>>>>> .*)$' "$LOCAL" >/dev/null; do
+ echo "$LOCAL contains conflict markers."
+ echo "Keep this version? [y/n]"
+ read answer
+ case "$answer" in
+ y*|Y*) success;;
+ n*|N*) failure;;
+ esac
+ done
+ success
+}
+
# Clean up when interrupted
trap "failure" 1 2 3 6 15 # HUP INT QUIT ABRT TERM
@@ -123,20 +137,20 @@
# filemerge prefers the right by default
$FILEMERGE -left "$OTHER" -right "$LOCAL" -ancestor "$BASE" -merge "$LOCAL"
[ $? -ne 0 ] && echo "FileMerge failed to launch" && failure
- $TEST "$LOCAL" -nt "$CHGTEST" && success || ask_if_merged
+ $TEST "$LOCAL" -nt "$CHGTEST" && conflicts_or_success || ask_if_merged
fi
if [ -n "$DISPLAY" ]; then
# try using kdiff3, which is fairly nice
if [ -n "$KDIFF3" ]; then
$KDIFF3 --auto "$BASE" "$BACKUP" "$OTHER" -o "$LOCAL" || failure
- success
+ conflicts_or_success
fi
# try using tkdiff, which is a bit less sophisticated
if [ -n "$TKDIFF" ]; then
$TKDIFF "$BACKUP" "$OTHER" -a "$BASE" -o "$LOCAL" || failure
- success
+ conflicts_or_success
fi
if [ -n "$MELD" ]; then
@@ -147,7 +161,7 @@
# use the file with conflicts
$MELD "$LOCAL.tmp.$RAND" "$LOCAL" "$OTHER" || failure
# Also it doesn't return good error code
- $TEST "$LOCAL" -nt "$CHGTEST" && success || ask_if_merged
+ $TEST "$LOCAL" -nt "$CHGTEST" && conflicts_or_success || ask_if_merged
fi
fi
@@ -158,7 +172,7 @@
$EDITOR "$LOCAL" || failure
# Some editors do not return meaningful error codes
# Do not take any chances
- $TEST "$LOCAL" -nt "$CHGTEST" && success || ask_if_merged
+ $TEST "$LOCAL" -nt "$CHGTEST" && conflicts_or_success || ask_if_merged
fi
# attempt to manually merge with diff and patch
--- a/mercurial/bundlerepo.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/bundlerepo.py Wed Aug 08 23:00:01 2007 +0200
@@ -12,8 +12,7 @@
from node import *
from i18n import _
-import changegroup, util, os, struct, bz2, tempfile
-
+import changegroup, util, os, struct, bz2, tempfile, mdiff
import localrepo, changelog, manifest, filelog, revlog
class bundlerevlog(revlog.revlog):
@@ -58,13 +57,10 @@
if not prev:
prev = p1
# start, size, base is not used, link, p1, p2, delta ref
- if self.version == revlog.REVLOGV0:
- e = (start, size, None, link, p1, p2, node)
- else:
- e = (self.offset_type(start, 0), size, -1, None, link,
- self.rev(p1), self.rev(p2), node)
+ e = (revlog.offset_type(start, 0), size, -1, None, link,
+ self.rev(p1), self.rev(p2), node)
self.basemap[n] = prev
- self.index.append(e)
+ self.index.insert(-1, e)
self.nodemap[node] = n
prev = node
n += 1
@@ -80,7 +76,7 @@
# not against rev - 1
# XXX: could use some caching
if not self.bundle(rev):
- return revlog.revlog.chunk(self, rev, df, cachelen)
+ return revlog.revlog.chunk(self, rev, df)
self.bundlefile.seek(self.start(rev))
return self.bundlefile.read(self.length(rev))
@@ -94,7 +90,7 @@
elif not self.bundle(rev1) and not self.bundle(rev2):
return revlog.revlog.revdiff(self, rev1, rev2)
- return self.diff(self.revision(self.node(rev1)),
+ return mdiff.textdiff(self.revision(self.node(rev1)),
self.revision(self.node(rev2)))
def revision(self, node):
@@ -107,8 +103,8 @@
rev = self.rev(iter_node)
# reconstruct the revision if it is from a changegroup
while self.bundle(rev):
- if self.cache and self.cache[0] == iter_node:
- text = self.cache[2]
+ if self._cache and self._cache[0] == iter_node:
+ text = self._cache[2]
break
chain.append(rev)
iter_node = self.bundlebase(rev)
@@ -118,14 +114,14 @@
while chain:
delta = self.chunk(chain.pop())
- text = self.patches(text, [delta])
+ text = mdiff.patches(text, [delta])
p1, p2 = self.parents(node)
if node != revlog.hash(text, p1, p2):
raise revlog.RevlogError(_("integrity check failed on %s:%d")
% (self.datafile, self.rev(node)))
- self.cache = (node, self.rev(node), text)
+ self._cache = (node, self.rev(node), text)
return text
def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
--- a/mercurial/changelog.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/changelog.py Wed Aug 08 23:00:01 2007 +0200
@@ -58,7 +58,6 @@
def read(self, count=-1):
'''only trick here is reads that span real file and data'''
ret = ""
- old_offset = self.offset
if self.offset < self.size:
s = self.fp.read(count)
ret = s
@@ -131,7 +130,10 @@
return extra
def encode_extra(self, d):
- items = [_string_escape(":".join(t)) for t in d.iteritems()]
+ # keys must be sorted to produce a deterministic changelog entry
+ keys = d.keys()
+ keys.sort()
+ items = [_string_escape('%s:%s' % (k, d[k])) for k in keys]
return "\0".join(items)
def extract(self, text):
--- a/mercurial/cmdutil.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/cmdutil.py Wed Aug 08 23:00:01 2007 +0200
@@ -8,6 +8,7 @@
from node import *
from i18n import _
import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex
+import bisect, stat
import mdiff, bdiff, util, templater, patch, commands, hg, lock, time
import fancyopts, revlog, version, extensions, hook
@@ -625,8 +626,7 @@
if bestname:
yield bestname, a, bestscore
-def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
- similarity=None):
+def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
if dry_run is None:
dry_run = opts.get('dry_run')
if similarity is None:
@@ -635,19 +635,19 @@
mapping = {}
for src, abs, rel, exact in walk(repo, pats, opts):
target = repo.wjoin(abs)
- if src == 'f' and repo.dirstate.state(abs) == '?':
+ if src == 'f' and abs not in repo.dirstate:
add.append(abs)
mapping[abs] = rel, exact
if repo.ui.verbose or not exact:
repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
- if repo.dirstate.state(abs) != 'r' and not util.lexists(target):
+ if repo.dirstate[abs] != 'r' and not util.lexists(target):
remove.append(abs)
mapping[abs] = rel, exact
if repo.ui.verbose or not exact:
repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
if not dry_run:
- repo.add(add, wlock=wlock)
- repo.remove(remove, wlock=wlock)
+ repo.add(add)
+ repo.remove(remove)
if similarity > 0:
for old, new, score in findrenames(repo, add, remove, similarity):
oldrel, oldexact = mapping[old]
@@ -657,7 +657,7 @@
'(%d%% similar)\n') %
(oldrel, newrel, score * 100))
if not dry_run:
- repo.copy(old, new, wlock=wlock)
+ repo.copy(old, new)
def service(opts, parentfn=None, initfn=None, runfn=None):
'''Run a command as a service.'''
@@ -1273,3 +1273,45 @@
for rev in nrevs:
yield 'iter', rev, None
return iterate(), matchfn
+
+def commit(ui, repo, commitfunc, pats, opts):
+ '''commit the specified files or all outstanding changes'''
+ message = logmessage(opts)
+
+ if opts['addremove']:
+ addremove(repo, pats, opts)
+ fns, match, anypats = matchpats(repo, pats, opts)
+ if pats:
+ status = repo.status(files=fns, match=match)
+ modified, added, removed, deleted, unknown = status[:5]
+ files = modified + added + removed
+ slist = None
+ for f in fns:
+ if f == '.':
+ continue
+ if f not in files:
+ rf = repo.wjoin(f)
+ try:
+ mode = os.lstat(rf)[stat.ST_MODE]
+ except OSError:
+ raise util.Abort(_("file %s not found!") % rf)
+ if stat.S_ISDIR(mode):
+ name = f + '/'
+ if slist is None:
+ slist = list(files)
+ slist.sort()
+ i = bisect.bisect(slist, name)
+ if i >= len(slist) or not slist[i].startswith(name):
+ raise util.Abort(_("no match under directory %s!")
+ % rf)
+ elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
+ raise util.Abort(_("can't commit %s: "
+ "unsupported file type!") % rf)
+ elif f not in repo.dirstate:
+ raise util.Abort(_("file %s not tracked!") % rf)
+ else:
+ files = []
+ try:
+ return commitfunc(ui, repo, files, message, match, opts)
+ except ValueError, inst:
+ raise util.Abort(str(inst))
--- a/mercurial/commands.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/commands.py Wed Aug 08 23:00:01 2007 +0200
@@ -8,7 +8,7 @@
import demandimport; demandimport.enable()
from node import *
from i18n import _
-import bisect, os, re, sys, urllib, shlex, stat
+import os, re, sys, urllib
import ui, hg, util, revlog, bundlerepo, extensions
import difflib, patch, time, help, mdiff, tempfile
import errno, version, socket
@@ -33,7 +33,7 @@
if ui.verbose:
ui.status(_('adding %s\n') % rel)
names.append(abs)
- elif repo.dirstate.state(abs) == '?':
+ elif abs not in repo.dirstate:
ui.status(_('adding %s\n') % rel)
names.append(abs)
if not opts.get('dry_run'):
@@ -73,19 +73,31 @@
detects as binary. With -a, annotate will generate an annotation
anyway, probably with undesirable results.
"""
- getdate = util.cachefunc(lambda x: util.datestr(x.date()))
+ getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
if not pats:
raise util.Abort(_('at least one file name or pattern required'))
- opmap = [['user', lambda x: ui.shortuser(x.user())],
- ['number', lambda x: str(x.rev())],
- ['changeset', lambda x: short(x.node())],
- ['date', getdate], ['follow', lambda x: x.path()]]
+ opmap = [('user', lambda x: ui.shortuser(x[0].user())),
+ ('number', lambda x: str(x[0].rev())),
+ ('changeset', lambda x: short(x[0].node())),
+ ('date', getdate),
+ ('follow', lambda x: x[0].path()),
+ ]
+
if (not opts['user'] and not opts['changeset'] and not opts['date']
and not opts['follow']):
opts['number'] = 1
+ linenumber = opts.get('line_number') is not None
+ if (linenumber and (not opts['changeset']) and (not opts['number'])):
+ raise util.Abort(_('at least one of -n/-c is required for -l'))
+
+ funcmap = [func for op, func in opmap if opts.get(op)]
+ if linenumber:
+ lastfunc = funcmap[-1]
+ funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
+
ctx = repo.changectx(opts['rev'])
for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
@@ -95,15 +107,15 @@
ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
continue
- lines = fctx.annotate(follow=opts.get('follow'))
+ lines = fctx.annotate(follow=opts.get('follow'),
+ linenumber=linenumber)
pieces = []
- for o, f in opmap:
- if opts[o]:
- l = [f(n) for n, dummy in lines]
- if l:
- m = max(map(len, l))
- pieces.append(["%*s" % (m, x) for x in l])
+ for f in funcmap:
+ l = [f(n) for n, dummy in lines]
+ if l:
+ m = max(map(len, l))
+ pieces.append(["%*s" % (m, x) for x in l])
if pieces:
for p, l in zip(zip(*pieces), lines):
@@ -416,48 +428,12 @@
If no commit message is specified, the editor configured in your hgrc
or in the EDITOR environment variable is started to enter a message.
"""
- message = cmdutil.logmessage(opts)
-
- if opts['addremove']:
- cmdutil.addremove(repo, pats, opts)
- fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
- if pats:
- status = repo.status(files=fns, match=match)
- modified, added, removed, deleted, unknown = status[:5]
- files = modified + added + removed
- slist = None
- for f in fns:
- if f == '.':
- continue
- if f not in files:
- rf = repo.wjoin(f)
- try:
- mode = os.lstat(rf)[stat.ST_MODE]
- except OSError:
- raise util.Abort(_("file %s not found!") % rf)
- if stat.S_ISDIR(mode):
- name = f + '/'
- if slist is None:
- slist = list(files)
- slist.sort()
- i = bisect.bisect(slist, name)
- if i >= len(slist) or not slist[i].startswith(name):
- raise util.Abort(_("no match under directory %s!")
- % rf)
- elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
- raise util.Abort(_("can't commit %s: "
- "unsupported file type!") % rf)
- elif repo.dirstate.state(f) == '?':
- raise util.Abort(_("file %s not tracked!") % rf)
- else:
- files = []
- try:
- repo.commit(files, message, opts['user'], opts['date'], match,
- force_editor=opts.get('force_editor'))
- except ValueError, inst:
- raise util.Abort(str(inst))
-
-def docopy(ui, repo, pats, opts, wlock):
+ def commitfunc(ui, repo, files, message, match, opts):
+ return repo.commit(files, message, opts['user'], opts['date'], match,
+ force_editor=opts.get('force_editor'))
+ cmdutil.commit(ui, repo, commitfunc, pats, opts)
+
+def docopy(ui, repo, pats, opts):
# called with the repo lock held
#
# hgsep => pathname that uses "/" to separate directories
@@ -473,7 +449,7 @@
def okaytocopy(abs, rel, exact):
reasons = {'?': _('is not managed'),
'r': _('has been marked for remove')}
- state = repo.dirstate.state(abs)
+ state = repo.dirstate[abs]
reason = reasons.get(state)
if reason:
if exact:
@@ -501,7 +477,7 @@
repo.pathto(prevsrc, cwd)))
return
if (not opts['after'] and os.path.exists(target) or
- opts['after'] and repo.dirstate.state(abstarget) not in '?ar'):
+ opts['after'] and repo.dirstate[abstarget] in 'mn'):
if not opts['force']:
ui.warn(_('%s: not overwriting - file exists\n') %
reltarget)
@@ -516,16 +492,16 @@
if not os.path.isdir(targetdir) and not opts.get('dry_run'):
os.makedirs(targetdir)
try:
- restore = repo.dirstate.state(abstarget) == 'r'
+ restore = repo.dirstate[abstarget] == 'r'
if restore and not opts.get('dry_run'):
- repo.undelete([abstarget], wlock)
+ repo.undelete([abstarget])
try:
if not opts.get('dry_run'):
util.copyfile(src, target)
restore = False
finally:
if restore:
- repo.remove([abstarget], wlock=wlock)
+ repo.remove([abstarget])
except IOError, inst:
if inst.errno == errno.ENOENT:
ui.warn(_('%s: deleted in working copy\n') % relsrc)
@@ -538,15 +514,15 @@
ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
targets[abstarget] = abssrc
if abstarget != origsrc:
- if repo.dirstate.state(origsrc) == 'a':
+ if repo.dirstate[origsrc] == 'a':
if not ui.quiet:
ui.warn(_("%s has not been committed yet, so no copy "
"data will be stored for %s.\n")
% (repo.pathto(origsrc, cwd), reltarget))
if abstarget not in repo.dirstate and not opts.get('dry_run'):
- repo.add([abstarget], wlock)
+ repo.add([abstarget])
elif not opts.get('dry_run'):
- repo.copy(origsrc, abstarget, wlock)
+ repo.copy(origsrc, abstarget)
copied.append((abssrc, relsrc, exact))
# pat: ossep
@@ -666,8 +642,11 @@
This command takes effect in the next commit. To undo a copy
before that, see hg revert.
"""
- wlock = repo.wlock(0)
- errs, copied = docopy(ui, repo, pats, opts, wlock)
+ wlock = repo.wlock(False)
+ try:
+ errs, copied = docopy(ui, repo, pats, opts)
+ finally:
+ del wlock
return errs
def debugancestor(ui, index, rev1, rev2):
@@ -704,17 +683,19 @@
ctx = repo.changectx(rev)
files = ctx.manifest()
wlock = repo.wlock()
- repo.dirstate.rebuild(rev, files)
+ try:
+ repo.dirstate.rebuild(rev, files)
+ finally:
+ del wlock
def debugcheckstate(ui, repo):
"""validate the correctness of the current dirstate"""
parent1, parent2 = repo.dirstate.parents()
- dc = repo.dirstate
m1 = repo.changectx(parent1).manifest()
m2 = repo.changectx(parent2).manifest()
errors = 0
- for f in dc:
- state = repo.dirstate.state(f)
+ for f in repo.dirstate:
+ state = repo.dirstate[f]
if state in "nr" and f not in m1:
ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
errors += 1
@@ -726,7 +707,7 @@
(f, state))
errors += 1
for f in m1:
- state = repo.dirstate.state(f)
+ state = repo.dirstate[f]
if state not in "nrm":
ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
errors += 1
@@ -774,12 +755,14 @@
try:
repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
finally:
- wlock.release()
+ del wlock
def debugstate(ui, repo):
"""show the contents of the current dirstate"""
- dc = repo.dirstate
- for file_ in dc:
+ dc = repo.dirstate._map
+ k = dc.keys()
+ k.sort()
+ for file_ in k:
if dc[file_][3] == -1:
# Pad or slice to locale representation
locale_len = len(time.strftime("%x %X", time.localtime(0)))
@@ -841,7 +824,7 @@
'''test Mercurial installation'''
def writetemp(contents):
- (fd, name) = tempfile.mkstemp()
+ (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
f = os.fdopen(fd, "wb")
f.write(contents)
f.close()
@@ -1572,70 +1555,75 @@
d = opts["base"]
strip = opts["strip"]
-
- wlock = repo.wlock()
- lock = repo.lock()
-
- for p in patches:
- pf = os.path.join(d, p)
-
- if pf == '-':
- ui.status(_("applying patch from stdin\n"))
- tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, sys.stdin)
- else:
- ui.status(_("applying %s\n") % p)
- tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, file(pf, 'rb'))
-
- if tmpname is None:
- raise util.Abort(_('no diffs found'))
-
- try:
- cmdline_message = cmdutil.logmessage(opts)
- if cmdline_message:
- # pickup the cmdline msg
- message = cmdline_message
- elif message:
- # pickup the patch msg
- message = message.strip()
+ wlock = lock = None
+ try:
+ wlock = repo.wlock()
+ lock = repo.lock()
+ for p in patches:
+ pf = os.path.join(d, p)
+
+ if pf == '-':
+ ui.status(_("applying patch from stdin\n"))
+ data = patch.extract(ui, sys.stdin)
else:
- # launch the editor
- message = None
- ui.debug(_('message:\n%s\n') % message)
-
- wp = repo.workingctx().parents()
- if opts.get('exact'):
- if not nodeid or not p1:
- raise util.Abort(_('not a mercurial patch'))
- p1 = repo.lookup(p1)
- p2 = repo.lookup(p2 or hex(nullid))
-
- if p1 != wp[0].node():
- hg.clean(repo, p1, wlock=wlock)
- repo.dirstate.setparents(p1, p2)
- elif p2:
- try:
+ ui.status(_("applying %s\n") % p)
+ data = patch.extract(ui, file(pf, 'rb'))
+
+ tmpname, message, user, date, branch, nodeid, p1, p2 = data
+
+ if tmpname is None:
+ raise util.Abort(_('no diffs found'))
+
+ try:
+ cmdline_message = cmdutil.logmessage(opts)
+ if cmdline_message:
+ # pickup the cmdline msg
+ message = cmdline_message
+ elif message:
+ # pickup the patch msg
+ message = message.strip()
+ else:
+ # launch the editor
+ message = None
+ ui.debug(_('message:\n%s\n') % message)
+
+ wp = repo.workingctx().parents()
+ if opts.get('exact'):
+ if not nodeid or not p1:
+ raise util.Abort(_('not a mercurial patch'))
p1 = repo.lookup(p1)
- p2 = repo.lookup(p2)
- if p1 == wp[0].node():
- repo.dirstate.setparents(p1, p2)
- except hg.RepoError:
- pass
- if opts.get('exact') or opts.get('import_branch'):
- repo.dirstate.setbranch(branch or 'default')
-
- files = {}
- try:
- fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
- files=files)
+ p2 = repo.lookup(p2 or hex(nullid))
+
+ if p1 != wp[0].node():
+ hg.clean(repo, p1)
+ repo.dirstate.setparents(p1, p2)
+ elif p2:
+ try:
+ p1 = repo.lookup(p1)
+ p2 = repo.lookup(p2)
+ if p1 == wp[0].node():
+ repo.dirstate.setparents(p1, p2)
+ except hg.RepoError:
+ pass
+ if opts.get('exact') or opts.get('import_branch'):
+ repo.dirstate.setbranch(branch or 'default')
+
+ files = {}
+ try:
+ fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
+ files=files)
+ finally:
+ files = patch.updatedir(ui, repo, files)
+ n = repo.commit(files, message, user, date)
+ if opts.get('exact'):
+ if hex(n) != nodeid:
+ repo.rollback()
+ raise util.Abort(_('patch is damaged' +
+ ' or loses information'))
finally:
- files = patch.updatedir(ui, repo, files, wlock=wlock)
- n = repo.commit(files, message, user, date, wlock=wlock, lock=lock)
- if opts.get('exact'):
- if hex(n) != nodeid:
- repo.rollback(wlock=wlock, lock=lock)
- raise util.Abort(_('patch is damaged or loses information'))
- finally:
- os.unlink(tmpname)
+ os.unlink(tmpname)
+ finally:
+ del lock, wlock
def incoming(ui, repo, source="default", **opts):
"""show new changesets found in source
@@ -1750,7 +1738,7 @@
default='relglob'):
if src == 'b':
continue
- if not node and repo.dirstate.state(abs) == '?':
+ if not node and abs not in repo.dirstate:
continue
if opts['fullpath']:
ui.write(os.path.join(repo.root, abs), end)
@@ -2188,7 +2176,6 @@
Modified files and added files are not removed by default. To
remove them, use the -f/--force option.
"""
- names = []
if not opts['after'] and not pats:
raise util.Abort(_('no files specified'))
files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
@@ -2205,7 +2192,7 @@
forget.append(abs)
continue
reason = _('has been marked for add (use -f to force removal)')
- elif repo.dirstate.state(abs) == '?':
+ elif abs not in repo.dirstate:
reason = _('is not managed')
elif opts['after'] and not exact and abs not in deleted:
continue
@@ -2235,16 +2222,19 @@
This command takes effect in the next commit. To undo a rename
before that, see hg revert.
"""
- wlock = repo.wlock(0)
- errs, copied = docopy(ui, repo, pats, opts, wlock)
- names = []
- for abs, rel, exact in copied:
- if ui.verbose or not exact:
- ui.status(_('removing %s\n') % rel)
- names.append(abs)
- if not opts.get('dry_run'):
- repo.remove(names, True, wlock=wlock)
- return errs
+ wlock = repo.wlock(False)
+ try:
+ errs, copied = docopy(ui, repo, pats, opts)
+ names = []
+ for abs, rel, exact in copied:
+ if ui.verbose or not exact:
+ ui.status(_('removing %s\n') % rel)
+ names.append(abs)
+ if not opts.get('dry_run'):
+ repo.remove(names, True)
+ return errs
+ finally:
+ del wlock
def revert(ui, repo, *pats, **opts):
"""revert files or dirs to their states as of some revision
@@ -2298,8 +2288,6 @@
else:
pmf = None
- wlock = repo.wlock()
-
# need all matching names in dirstate and manifest of target rev,
# so have to walk both. do not print errors if files exist in one
# but not other.
@@ -2307,109 +2295,116 @@
names = {}
target_only = {}
- # walk dirstate.
-
- for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
- badmatch=mf.has_key):
- names[abs] = (rel, exact)
- if src == 'b':
- target_only[abs] = True
-
- # walk target manifest.
-
- def badmatch(path):
- if path in names:
- return True
- path_ = path + '/'
- for f in names:
- if f.startswith(path_):
+ wlock = repo.wlock()
+ try:
+ # walk dirstate.
+ for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
+ badmatch=mf.has_key):
+ names[abs] = (rel, exact)
+ if src == 'b':
+ target_only[abs] = True
+
+ # walk target manifest.
+
+ def badmatch(path):
+ if path in names:
return True
- return False
-
- for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
- badmatch=badmatch):
- if abs in names or src == 'b':
- continue
- names[abs] = (rel, exact)
- target_only[abs] = True
-
- changes = repo.status(match=names.has_key, wlock=wlock)[:5]
- modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
-
- revert = ([], _('reverting %s\n'))
- add = ([], _('adding %s\n'))
- remove = ([], _('removing %s\n'))
- forget = ([], _('forgetting %s\n'))
- undelete = ([], _('undeleting %s\n'))
- update = {}
-
- disptable = (
- # dispatch table:
- # file state
- # action if in target manifest
- # action if not in target manifest
- # make backup if in target manifest
- # make backup if not in target manifest
- (modified, revert, remove, True, True),
- (added, revert, forget, True, False),
- (removed, undelete, None, False, False),
- (deleted, revert, remove, False, False),
- (unknown, add, None, True, False),
- (target_only, add, None, False, False),
- )
-
- entries = names.items()
- entries.sort()
-
- for abs, (rel, exact) in entries:
- mfentry = mf.get(abs)
- target = repo.wjoin(abs)
- def handle(xlist, dobackup):
- xlist[0].append(abs)
- update[abs] = 1
- if dobackup and not opts['no_backup'] and util.lexists(target):
- bakname = "%s.orig" % rel
- ui.note(_('saving current version of %s as %s\n') %
- (rel, bakname))
- if not opts.get('dry_run'):
- util.copyfile(target, bakname)
- if ui.verbose or not exact:
- ui.status(xlist[1] % rel)
- for table, hitlist, misslist, backuphit, backupmiss in disptable:
- if abs not in table: continue
- # file has changed in dirstate
- if mfentry:
- handle(hitlist, backuphit)
- elif misslist is not None:
- handle(misslist, backupmiss)
+ path_ = path + '/'
+ for f in names:
+ if f.startswith(path_):
+ return True
+ return False
+
+ for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
+ badmatch=badmatch):
+ if abs in names or src == 'b':
+ continue
+ names[abs] = (rel, exact)
+ target_only[abs] = True
+
+ changes = repo.status(match=names.has_key)[:5]
+ modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
+
+ revert = ([], _('reverting %s\n'))
+ add = ([], _('adding %s\n'))
+ remove = ([], _('removing %s\n'))
+ forget = ([], _('forgetting %s\n'))
+ undelete = ([], _('undeleting %s\n'))
+ update = {}
+
+ disptable = (
+ # dispatch table:
+ # file state
+ # action if in target manifest
+ # action if not in target manifest
+ # make backup if in target manifest
+ # make backup if not in target manifest
+ (modified, revert, remove, True, True),
+ (added, revert, forget, True, False),
+ (removed, undelete, None, False, False),
+ (deleted, revert, remove, False, False),
+ (unknown, add, None, True, False),
+ (target_only, add, None, False, False),
+ )
+
+ entries = names.items()
+ entries.sort()
+
+ for abs, (rel, exact) in entries:
+ mfentry = mf.get(abs)
+ target = repo.wjoin(abs)
+ def handle(xlist, dobackup):
+ xlist[0].append(abs)
+ update[abs] = 1
+ if dobackup and not opts['no_backup'] and util.lexists(target):
+ bakname = "%s.orig" % rel
+ ui.note(_('saving current version of %s as %s\n') %
+ (rel, bakname))
+ if not opts.get('dry_run'):
+ util.copyfile(target, bakname)
+ if ui.verbose or not exact:
+ ui.status(xlist[1] % rel)
+ for table, hitlist, misslist, backuphit, backupmiss in disptable:
+ if abs not in table: continue
+ # file has changed in dirstate
+ if mfentry:
+ handle(hitlist, backuphit)
+ elif misslist is not None:
+ handle(misslist, backupmiss)
+ else:
+ if exact: ui.warn(_('file not managed: %s\n') % rel)
+ break
else:
- if exact: ui.warn(_('file not managed: %s\n') % rel)
- break
- else:
- # file has not changed in dirstate
- if node == parent:
- if exact: ui.warn(_('no changes needed to %s\n') % rel)
- continue
- if pmf is None:
- # only need parent manifest in this unlikely case,
- # so do not read by default
- pmf = repo.changectx(parent).manifest()
- if abs in pmf:
- if mfentry:
- # if version of file is same in parent and target
- # manifests, do nothing
- if pmf[abs] != mfentry:
- handle(revert, False)
- else:
- handle(remove, False)
-
- if not opts.get('dry_run'):
- repo.dirstate.forget(forget[0])
- r = hg.revert(repo, node, update.has_key, wlock)
- repo.dirstate.update(add[0], 'a')
- repo.dirstate.update(undelete[0], 'n')
- repo.dirstate.update(remove[0], 'r')
- return r
+ # file has not changed in dirstate
+ if node == parent:
+ if exact: ui.warn(_('no changes needed to %s\n') % rel)
+ continue
+ if pmf is None:
+ # only need parent manifest in this unlikely case,
+ # so do not read by default
+ pmf = repo.changectx(parent).manifest()
+ if abs in pmf:
+ if mfentry:
+ # if version of file is same in parent and target
+ # manifests, do nothing
+ if pmf[abs] != mfentry:
+ handle(revert, False)
+ else:
+ handle(remove, False)
+
+ if not opts.get('dry_run'):
+ for f in forget[0]:
+ repo.dirstate.forget(f)
+ r = hg.revert(repo, node, update.has_key)
+ for f in add[0]:
+ repo.dirstate.add(f)
+ for f in undelete[0]:
+ repo.dirstate.normal(f)
+ for f in remove[0]:
+ repo.dirstate.remove(f)
+ return r
+ finally:
+ del wlock
def rollback(ui, repo):
"""roll back the last transaction in this repository
@@ -2467,7 +2462,7 @@
parentui = ui.parentui or ui
optlist = ("name templates style address port ipv6"
- " accesslog errorlog webdir_conf")
+ " accesslog errorlog webdir_conf certificate")
for o in optlist.split():
if opts[o]:
parentui.setconfig("web", o, str(opts[o]))
@@ -2650,7 +2645,6 @@
bundle command.
"""
fnames = (fname1,) + fnames
- result = None
for fname in fnames:
if os.path.exists(fname):
f = open(fname, "rb")
@@ -2757,6 +2751,11 @@
('l', 'logfile', '', _('read commit message from <file>')),
]
+commitopts2 = [
+ ('d', 'date', '', _('record datecode as commit date')),
+ ('u', 'user', '', _('record user as committer')),
+]
+
table = {
"^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
"addremove":
@@ -2774,8 +2773,10 @@
('d', 'date', None, _('list the date')),
('n', 'number', None, _('list the revision number (default)')),
('c', 'changeset', None, _('list the changeset')),
+ ('l', 'line-number', None,
+ _('show line number at the first appearance'))
] + walkopts,
- _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
+ _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
"archive":
(archive,
[('', 'no-decode', None, _('do not pass files through decoders')),
@@ -2788,11 +2789,9 @@
(backout,
[('', 'merge', None,
_('merge with old dirstate parent after backout')),
- ('d', 'date', '', _('record datecode as commit date')),
('', 'parent', '', _('parent to choose when backing out merge')),
- ('u', 'user', '', _('record user as committer')),
('r', 'rev', '', _('revision to backout')),
- ] + walkopts + commitopts,
+ ] + walkopts + commitopts + commitopts2,
_('hg backout [OPTION]... [-r] REV')),
"branch":
(branch,
@@ -2834,9 +2833,7 @@
(commit,
[('A', 'addremove', None,
_('mark new/missing files as added/removed before committing')),
- ('d', 'date', '', _('record datecode as commit date')),
- ('u', 'user', '', _('record user as commiter')),
- ] + walkopts + commitopts,
+ ] + walkopts + commitopts + commitopts2,
_('hg commit [OPTION]... [FILE]...')),
"copy|cp":
(copy,
@@ -3026,10 +3023,8 @@
"debugrawcommit|rawcommit":
(rawcommit,
[('p', 'parent', [], _('parent')),
- ('d', 'date', '', _('date code')),
- ('u', 'user', '', _('user')),
('F', 'files', '', _('file list'))
- ] + commitopts,
+ ] + commitopts + commitopts2,
_('hg debugrawcommit [OPTION]... [FILE]...')),
"recover": (recover, [], _('hg recover')),
"^remove|rm":
@@ -3075,7 +3070,8 @@
('', 'stdio', None, _('for remote clients')),
('t', 'templates', '', _('web templates to use')),
('', 'style', '', _('template style to use')),
- ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
+ ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
+ ('', 'certificate', '', _('SSL certificate file'))],
_('hg serve [OPTION]...')),
"^status|st":
(status,
@@ -3098,11 +3094,11 @@
(tag,
[('f', 'force', None, _('replace existing tag')),
('l', 'local', None, _('make the tag local')),
- ('m', 'message', '', _('message for tag commit log entry')),
- ('d', 'date', '', _('record datecode as commit date')),
- ('u', 'user', '', _('record user as commiter')),
('r', 'rev', '', _('revision to tag')),
- ('', 'remove', None, _('remove a tag'))],
+ ('', 'remove', None, _('remove a tag')),
+ # -l/--local is already there, commitopts cannot be used
+ ('m', 'message', '', _('use <text> as commit message')),
+ ] + commitopts2,
_('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
"tags": (tags, [], _('hg tags')),
"tip":
@@ -3126,6 +3122,8 @@
"version": (version_, [], _('hg version')),
}
+extensions.commandtable = table
+
norepo = ("clone init version help debugancestor debugcomplete debugdata"
" debugindex debugindexdot debugdate debuginstall")
optionalrepo = ("paths serve showconfig")
--- a/mercurial/context.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/context.py Wed Aug 08 23:00:01 2007 +0200
@@ -60,6 +60,18 @@
else:
raise AttributeError, name
+ def __contains__(self, key):
+ return key in self._manifest
+
+ def __getitem__(self, key):
+ return self.filectx(key)
+
+ def __iter__(self):
+ a = self._manifest.keys()
+ a.sort()
+ for f in a:
+            yield f
+
def changeset(self): return self._changeset
def manifest(self): return self._manifest
@@ -184,7 +196,7 @@
def __eq__(self, other):
try:
return (self._path == other._path
- and self._changeid == other._changeid)
+ and self._fileid == other._fileid)
except AttributeError:
return False
@@ -240,14 +252,32 @@
return [filectx(self._repo, self._path, fileid=x,
filelog=self._filelog) for x in c]
- def annotate(self, follow=False):
+ def annotate(self, follow=False, linenumber=None):
'''returns a list of tuples of (ctx, line) for each line
in the file, where ctx is the filectx of the node where
- that line was last changed'''
+ that line was last changed.
+ This returns tuples of ((ctx, linenumber), line) for each line,
+ if "linenumber" parameter is NOT "None".
+ In such tuples, linenumber means one at the first appearance
+ in the managed file.
+        To reduce annotation cost,
+        this returns a fixed value (False) as the linenumber,
+        if the "linenumber" parameter is "False".'''
- def decorate(text, rev):
+ def decorate_compat(text, rev):
return ([rev] * len(text.splitlines()), text)
+ def without_linenumber(text, rev):
+ return ([(rev, False)] * len(text.splitlines()), text)
+
+ def with_linenumber(text, rev):
+ size = len(text.splitlines())
+ return ([(rev, i) for i in xrange(1, size + 1)], text)
+
+ decorate = (((linenumber is None) and decorate_compat) or
+ (linenumber and with_linenumber) or
+ without_linenumber)
+
def pair(parent, child):
for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
child[0][b1:b2] = parent[0][a1:a2]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/diffhelpers.c Wed Aug 08 23:00:01 2007 +0200
@@ -0,0 +1,150 @@
+/*
+ * diffhelpers.c - helper routines for mpatch
+ *
+ * Copyright 2007 Chris Mason <chris.mason@oracle.com>
+ *
+ * This software may be used and distributed according to the terms
+ * of the GNU General Public License v2, incorporated herein by reference.
+ */
+
+#include <Python.h>
+#include <stdlib.h>
+#include <string.h>
+
+static char diffhelpers_doc[] = "Efficient diff parsing";
+static PyObject *diffhelpers_Error;
+
+
+/* fixup the last lines of a and b when the patch has no newline at eof */
+static void _fix_newline(PyObject *hunk, PyObject *a, PyObject *b)
+{
+ int hunksz = PyList_Size(hunk);
+ PyObject *s = PyList_GET_ITEM(hunk, hunksz-1);
+ char *l = PyString_AS_STRING(s);
+ int sz = PyString_GET_SIZE(s);
+ int alen = PyList_Size(a);
+ int blen = PyList_Size(b);
+ char c = l[0];
+
+ PyObject *hline = PyString_FromStringAndSize(l, sz-1);
+ if (c == ' ' || c == '+') {
+ PyObject *rline = PyString_FromStringAndSize(l+1, sz-2);
+ PyList_SetItem(b, blen-1, rline);
+ }
+ if (c == ' ' || c == '-') {
+ Py_INCREF(hline);
+ PyList_SetItem(a, alen-1, hline);
+ }
+ PyList_SetItem(hunk, hunksz-1, hline);
+}
+
+/* python callable form of _fix_newline */
+static PyObject *
+fix_newline(PyObject *self, PyObject *args)
+{
+ PyObject *hunk, *a, *b;
+ if (!PyArg_ParseTuple(args, "OOO", &hunk, &a, &b))
+ return NULL;
+ _fix_newline(hunk, a, b);
+ return Py_BuildValue("l", 0);
+}
+
+/*
+ * read lines from fp into the hunk. The hunk is parsed into two arrays
+ * a and b. a gets the old state of the text, b gets the new state
+ * The control char from the hunk is saved when inserting into a, but not b
+ * (for performance while deleting files)
+ */
+static PyObject *
+addlines(PyObject *self, PyObject *args)
+{
+
+ PyObject *fp, *hunk, *a, *b, *x;
+ int i;
+ int lena, lenb;
+ int num;
+ int todoa, todob;
+ char *s, c;
+ PyObject *l;
+ if (!PyArg_ParseTuple(args, "OOiiOO", &fp, &hunk, &lena, &lenb, &a, &b))
+ return NULL;
+
+ while(1) {
+ todoa = lena - PyList_Size(a);
+ todob = lenb - PyList_Size(b);
+ num = todoa > todob ? todoa : todob;
+ if (num == 0)
+ break;
+ for (i = 0 ; i < num ; i++) {
+ x = PyFile_GetLine(fp, 0);
+ s = PyString_AS_STRING(x);
+ c = *s;
+ if (strcmp(s, "\\ No newline at end of file\n") == 0) {
+ _fix_newline(hunk, a, b);
+ continue;
+ }
+ PyList_Append(hunk, x);
+ if (c == '+') {
+ l = PyString_FromString(s + 1);
+ PyList_Append(b, l);
+ Py_DECREF(l);
+ } else if (c == '-') {
+ PyList_Append(a, x);
+ } else {
+ l = PyString_FromString(s + 1);
+ PyList_Append(b, l);
+ Py_DECREF(l);
+ PyList_Append(a, x);
+ }
+ Py_DECREF(x);
+ }
+ }
+ return Py_BuildValue("l", 0);
+}
+
+/*
+ * compare the lines in a with the lines in b. a is assumed to have
+ * a control char at the start of each line, this char is ignored in the
+ * compare
+ */
+static PyObject *
+testhunk(PyObject *self, PyObject *args)
+{
+
+ PyObject *a, *b;
+ long bstart;
+ int alen, blen;
+ int i;
+ char *sa, *sb;
+
+ if (!PyArg_ParseTuple(args, "OOl", &a, &b, &bstart))
+ return NULL;
+ alen = PyList_Size(a);
+ blen = PyList_Size(b);
+ if (alen > blen - bstart) {
+ return Py_BuildValue("l", -1);
+ }
+ for (i = 0 ; i < alen ; i++) {
+ sa = PyString_AS_STRING(PyList_GET_ITEM(a, i));
+ sb = PyString_AS_STRING(PyList_GET_ITEM(b, i + bstart));
+ if (strcmp(sa+1, sb) != 0)
+ return Py_BuildValue("l", -1);
+ }
+ return Py_BuildValue("l", 0);
+}
+
+static PyMethodDef methods[] = {
+ {"addlines", addlines, METH_VARARGS, "add lines to a hunk\n"},
+ {"fix_newline", fix_newline, METH_VARARGS, "fixup newline counters\n"},
+ {"testhunk", testhunk, METH_VARARGS, "test lines in a hunk\n"},
+ {NULL, NULL}
+};
+
+PyMODINIT_FUNC
+initdiffhelpers(void)
+{
+ Py_InitModule3("diffhelpers", methods, diffhelpers_doc);
+ diffhelpers_Error = PyErr_NewException("diffhelpers.diffhelpersError",
+ NULL, NULL);
+}
+
--- a/mercurial/dirstate.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/dirstate.py Wed Aug 08 23:00:01 2007 +0200
@@ -20,8 +20,8 @@
def __init__(self, opener, ui, root):
self._opener = opener
self._root = root
- self._dirty = 0
- self._dirtypl = 0
+ self._dirty = False
+ self._dirtypl = False
self._ui = ui
def __getattr__(self, name):
@@ -53,7 +53,7 @@
self._incpath(f)
return self._dirs
elif name == '_ignore':
- files = [self.wjoin('.hgignore')]
+ files = [self._join('.hgignore')]
for name, path in self._ui.configitems("ui"):
if name == 'ignore' or name.startswith('ignore.'):
files.append(os.path.expanduser(path))
@@ -65,7 +65,7 @@
else:
raise AttributeError, name
- def wjoin(self, f):
+ def _join(self, f):
return os.path.join(self._root, f)
def getcwd(self):
@@ -89,11 +89,14 @@
return path.replace(os.sep, '/')
return path
- def __del__(self):
- self.write()
-
def __getitem__(self, key):
- return self._map[key]
+ ''' current states:
+ n normal
+ m needs merging
+ r marked for removal
+ a marked for addition
+ ? not tracked'''
+ return self._map.get(key, ("?",))[0]
def __contains__(self, key):
return key in self._map
@@ -110,21 +113,14 @@
def branch(self):
return self._branch
- def markdirty(self):
- self._dirty = 1
-
def setparents(self, p1, p2=nullid):
- self.markdirty()
- self._dirtypl = 1
+ self._dirty = self._dirtypl = True
self._pl = p1, p2
def setbranch(self, branch):
self._branch = branch
self._opener("branch", "w").write(branch + '\n')
- def state(self, key):
- return self._map.get(key, ("?",))[0]
-
def _read(self):
self._map = {}
self._copymap = {}
@@ -166,10 +162,10 @@
for a in "_map _copymap _branch _pl _dirs _ignore".split():
if a in self.__dict__:
delattr(self, a)
- self._dirty = 0
+ self._dirty = False
def copy(self, source, dest):
- self.markdirty()
+ self._dirty = True
self._copymap[dest] = source
def copied(self, file):
@@ -195,56 +191,70 @@
raise util.Abort(_("'\\n' and '\\r' disallowed in filenames"))
# shadows
if f in self._dirs:
- raise util.Abort(_('directory named %r already in dirstate') % f)
+ raise util.Abort(_('directory %r already in dirstate') % f)
for c in strutil.rfindall(f, '/'):
d = f[:c]
if d in self._dirs:
break
if d in self._map:
- raise util.Abort(_('file named %r already in dirstate') % d)
+ raise util.Abort(_('file %r in dirstate clashes with %r') %
+ (d, f))
self._incpath(f)
- def update(self, files, state, **kw):
- ''' current states:
- n normal
- m needs merging
- r marked for removal
- a marked for addition'''
+ def normal(self, f):
+ 'mark a file normal'
+ self._dirty = True
+ s = os.lstat(self._join(f))
+ self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime)
+ if self._copymap.has_key(f):
+ del self._copymap[f]
- if not files: return
- self.markdirty()
- for f in files:
- if self._copymap.has_key(f):
- del self._copymap[f]
+ def normaldirty(self, f):
+ 'mark a file normal, but possibly dirty'
+ self._dirty = True
+ s = os.lstat(self._join(f))
+ self._map[f] = ('n', s.st_mode, -1, -1)
+ if f in self._copymap:
+ del self._copymap[f]
+
+ def add(self, f):
+ 'mark a file added'
+ self._dirty = True
+ self._incpathcheck(f)
+ self._map[f] = ('a', 0, -1, -1)
+ if f in self._copymap:
+ del self._copymap[f]
- if state == "r":
- self._map[f] = ('r', 0, 0, 0)
- self._decpath(f)
- continue
- else:
- if state == "a":
- self._incpathcheck(f)
- s = os.lstat(self.wjoin(f))
- st_size = kw.get('st_size', s.st_size)
- st_mtime = kw.get('st_mtime', s.st_mtime)
- self._map[f] = (state, s.st_mode, st_size, st_mtime)
+ def remove(self, f):
+ 'mark a file removed'
+ self._dirty = True
+ self._map[f] = ('r', 0, 0, 0)
+ self._decpath(f)
+ if f in self._copymap:
+ del self._copymap[f]
- def forget(self, files):
- if not files: return
- self.markdirty()
- for f in files:
- try:
- del self._map[f]
- self._decpath(f)
- except KeyError:
- self._ui.warn(_("not in dirstate: %s!\n") % f)
- pass
+ def merge(self, f):
+ 'mark a file merged'
+ self._dirty = True
+ s = os.lstat(self._join(f))
+ self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime)
+ if f in self._copymap:
+ del self._copymap[f]
+
+ def forget(self, f):
+ 'forget a file'
+ self._dirty = True
+ try:
+ del self._map[f]
+ self._decpath(f)
+ except KeyError:
+ self._ui.warn(_("not in dirstate: %s!\n") % f)
def clear(self):
self._map = {}
self._copymap = {}
self._pl = [nullid, nullid]
- self.markdirty()
+ self._dirty = True
def rebuild(self, parent, files):
self.clear()
@@ -254,7 +264,7 @@
else:
self._map[f] = ('n', 0666, -1, 0)
self._pl = (parent, nullid)
- self.markdirty()
+ self._dirty = True
def write(self):
if not self._dirty:
@@ -271,10 +281,9 @@
st = self._opener("dirstate", "w", atomictemp=True)
st.write(cs.getvalue())
st.rename()
- self._dirty = 0
- self._dirtypl = 0
+ self._dirty = self._dirtypl = False
- def filterfiles(self, files):
+ def _filter(self, files):
ret = {}
unknown = []
@@ -304,16 +313,16 @@
bs += 1
return ret
- def _supported(self, f, st, verbose=False):
- if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode):
+ def _supported(self, f, mode, verbose=False):
+ if stat.S_ISREG(mode) or stat.S_ISLNK(mode):
return True
if verbose:
kind = 'unknown'
- if stat.S_ISCHR(st.st_mode): kind = _('character device')
- elif stat.S_ISBLK(st.st_mode): kind = _('block device')
- elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
- elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
- elif stat.S_ISDIR(st.st_mode): kind = _('directory')
+ if stat.S_ISCHR(mode): kind = _('character device')
+ elif stat.S_ISBLK(mode): kind = _('block device')
+ elif stat.S_ISFIFO(mode): kind = _('fifo')
+ elif stat.S_ISSOCK(mode): kind = _('socket')
+ elif stat.S_ISDIR(mode): kind = _('directory')
self._ui.warn(_('%s: unsupported file type (type is %s)\n')
% (self.pathto(f), kind))
return False
@@ -345,7 +354,7 @@
dc = self._map.copy()
else:
files = util.unique(files)
- dc = self.filterfiles(files)
+ dc = self._filter(files)
def imatch(file_):
if file_ not in dc and self._ignore(file_):
@@ -361,59 +370,73 @@
common_prefix_len = len(self._root)
if not self._root.endswith(os.sep):
common_prefix_len += 1
+
+ normpath = util.normpath
+ listdir = os.listdir
+ lstat = os.lstat
+ bisect_left = bisect.bisect_left
+ isdir = os.path.isdir
+ pconvert = util.pconvert
+ join = os.path.join
+ s_isdir = stat.S_ISDIR
+ supported = self._supported
+ _join = self._join
+ known = {'.hg': 1}
+
# recursion free walker, faster than os.walk.
def findfiles(s):
work = [s]
+ wadd = work.append
+ found = []
+ add = found.append
if directories:
- yield 'd', util.normpath(s[common_prefix_len:]), os.lstat(s)
+ add((normpath(s[common_prefix_len:]), 'd', lstat(s)))
while work:
top = work.pop()
- names = os.listdir(top)
+ names = listdir(top)
names.sort()
# nd is the top of the repository dir tree
- nd = util.normpath(top[common_prefix_len:])
+ nd = normpath(top[common_prefix_len:])
if nd == '.':
nd = ''
else:
# do not recurse into a repo contained in this
# one. use bisect to find .hg directory so speed
# is good on big directory.
- hg = bisect.bisect_left(names, '.hg')
+ hg = bisect_left(names, '.hg')
if hg < len(names) and names[hg] == '.hg':
- if os.path.isdir(os.path.join(top, '.hg')):
+ if isdir(join(top, '.hg')):
continue
for f in names:
- np = util.pconvert(os.path.join(nd, f))
- if seen(np):
+ np = pconvert(join(nd, f))
+ if np in known:
continue
- p = os.path.join(top, f)
+ known[np] = 1
+ p = join(top, f)
# don't trip over symlinks
- st = os.lstat(p)
- if stat.S_ISDIR(st.st_mode):
+ st = lstat(p)
+ if s_isdir(st.st_mode):
if not ignore(np):
- work.append(p)
+ wadd(p)
if directories:
- yield 'd', np, st
- if imatch(np) and np in dc:
- yield 'm', np, st
+ add((np, 'd', st))
+ if np in dc and match(np):
+ add((np, 'm', st))
elif imatch(np):
- if self._supported(np, st):
- yield 'f', np, st
+ if supported(np, st.st_mode):
+ add((np, 'f', st))
elif np in dc:
- yield 'm', np, st
-
- known = {'.hg': 1}
- def seen(fn):
- if fn in known: return True
- known[fn] = 1
+ add((np, 'm', st))
+ found.sort()
+ return found
# step one, find all files that match our criteria
files.sort()
for ff in files:
- nf = util.normpath(ff)
- f = self.wjoin(ff)
+ nf = normpath(ff)
+ f = _join(ff)
try:
- st = os.lstat(f)
+ st = lstat(f)
except OSError, inst:
found = False
for fn in dc:
@@ -427,15 +450,15 @@
elif badmatch and badmatch(ff) and imatch(nf):
yield 'b', ff, None
continue
- if stat.S_ISDIR(st.st_mode):
- cmp1 = (lambda x, y: cmp(x[1], y[1]))
- sorted_ = [ x for x in findfiles(f) ]
- sorted_.sort(cmp1)
- for e in sorted_:
- yield e
+ if s_isdir(st.st_mode):
+ for f, src, st in findfiles(f):
+ yield src, f, st
else:
- if not seen(nf) and match(nf):
- if self._supported(ff, st, verbose=True):
+ if nf in known:
+ continue
+ known[nf] = 1
+ if match(nf):
+ if supported(ff, st.st_mode, verbose=True):
yield 'f', nf, st
elif ff in dc:
yield 'm', nf, st
@@ -445,58 +468,73 @@
ks = dc.keys()
ks.sort()
for k in ks:
- if not seen(k) and imatch(k):
+ if k in known:
+ continue
+ known[k] = 1
+ if imatch(k):
yield 'm', k, None
- def status(self, files=None, match=util.always, list_ignored=False,
- list_clean=False):
+ def status(self, files, match, list_ignored, list_clean):
lookup, modified, added, unknown, ignored = [], [], [], [], []
removed, deleted, clean = [], [], []
+ _join = self._join
+ lstat = os.lstat
+ cmap = self._copymap
+ dmap = self._map
+ ladd = lookup.append
+ madd = modified.append
+ aadd = added.append
+ uadd = unknown.append
+ iadd = ignored.append
+ radd = removed.append
+ dadd = deleted.append
+ cadd = clean.append
+
for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
- try:
- type_, mode, size, time = self[fn]
- except KeyError:
+ if fn in dmap:
+ type_, mode, size, time = dmap[fn]
+ else:
if list_ignored and self._ignore(fn):
- ignored.append(fn)
+ iadd(fn)
else:
- unknown.append(fn)
+ uadd(fn)
continue
if src == 'm':
nonexistent = True
if not st:
try:
- st = os.lstat(self.wjoin(fn))
+ st = lstat(_join(fn))
except OSError, inst:
if inst.errno != errno.ENOENT:
raise
st = None
# We need to re-check that it is a valid file
- if st and self._supported(fn, st):
+ if st and self._supported(fn, st.st_mode):
nonexistent = False
# XXX: what to do with file no longer present in the fs
# who are not removed in the dirstate ?
if nonexistent and type_ in "nm":
- deleted.append(fn)
+ dadd(fn)
continue
# check the common case first
if type_ == 'n':
if not st:
- st = os.lstat(self.wjoin(fn))
+ st = lstat(_join(fn))
if (size >= 0 and (size != st.st_size
or (mode ^ st.st_mode) & 0100)
or fn in self._copymap):
- modified.append(fn)
+ madd(fn)
elif time != int(st.st_mtime):
- lookup.append(fn)
+ ladd(fn)
elif list_clean:
- clean.append(fn)
+ cadd(fn)
elif type_ == 'm':
- modified.append(fn)
+ madd(fn)
elif type_ == 'a':
- added.append(fn)
+ aadd(fn)
elif type_ == 'r':
- removed.append(fn)
+ radd(fn)
return (lookup, modified, added, removed, deleted, unknown, ignored,
clean)
--- a/mercurial/extensions.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/extensions.py Wed Aug 08 23:00:01 2007 +0200
@@ -6,10 +6,12 @@
# of the GNU General Public License, incorporated herein by reference.
import imp, os
-import commands, hg, util, sys
+import util, sys
from i18n import _
_extensions = {}
+commandtable = {}
+setuphooks = []
def find(name):
'''return module with given extension name'''
@@ -22,7 +24,11 @@
raise KeyError(name)
def load(ui, name, path):
- if name in _extensions:
+ if name.startswith('hgext.'):
+ shortname = name[6:]
+ else:
+ shortname = name
+ if shortname in _extensions:
return
if path:
# the module will be loaded in sys.modules
@@ -47,20 +53,20 @@
mod = importh("hgext.%s" % name)
except ImportError:
mod = importh(name)
- _extensions[name] = mod
+ _extensions[shortname] = mod
uisetup = getattr(mod, 'uisetup', None)
if uisetup:
uisetup(ui)
reposetup = getattr(mod, 'reposetup', None)
if reposetup:
- hg.repo_setup_hooks.append(reposetup)
+ setuphooks.append(reposetup)
cmdtable = getattr(mod, 'cmdtable', {})
- overrides = [cmd for cmd in cmdtable if cmd in commands.table]
+ overrides = [cmd for cmd in cmdtable if cmd in commandtable]
if overrides:
ui.warn(_("extension '%s' overrides commands: %s\n")
% (name, " ".join(overrides)))
- commands.table.update(cmdtable)
+ commandtable.update(cmdtable)
def loadall(ui):
result = ui.configitems("extensions")
--- a/mercurial/hg.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/hg.py Wed Aug 08 23:00:01 2007 +0200
@@ -10,7 +10,7 @@
from repo import *
from i18n import _
import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
-import errno, lock, os, shutil, util, cmdutil
+import errno, lock, os, shutil, util, cmdutil, extensions
import merge as _merge
import verify as _verify
@@ -21,13 +21,11 @@
schemes = {
'bundle': bundlerepo,
'file': _local,
- 'hg': httprepo,
'http': httprepo,
'https': httprepo,
- 'old-http': statichttprepo,
'ssh': sshrepo,
'static-http': statichttprepo,
- }
+}
def _lookup(path):
scheme = 'file'
@@ -50,13 +48,11 @@
return False
return repo.local()
-repo_setup_hooks = []
-
def repository(ui, path='', create=False):
"""return a repository object for the specified path"""
repo = _lookup(path).instance(ui, path, create)
ui = getattr(repo, "ui", ui)
- for hook in repo_setup_hooks:
+ for hook in extensions.setuphooks:
hook(ui, repo)
return repo
@@ -134,103 +130,99 @@
if self.dir_:
self.rmtree(self.dir_, True)
- dir_cleanup = None
- if islocal(dest):
- dir_cleanup = DirCleanup(dest)
+ src_lock = dest_lock = dir_cleanup = None
+ try:
+ if islocal(dest):
+ dir_cleanup = DirCleanup(dest)
- abspath = origsource
- copy = False
- if src_repo.local() and islocal(dest):
- abspath = os.path.abspath(origsource)
- copy = not pull and not rev
+ abspath = origsource
+ copy = False
+ if src_repo.local() and islocal(dest):
+ abspath = os.path.abspath(origsource)
+ copy = not pull and not rev
- src_lock, dest_lock = None, None
- if copy:
- try:
- # we use a lock here because if we race with commit, we
- # can end up with extra data in the cloned revlogs that's
- # not pointed to by changesets, thus causing verify to
- # fail
- src_lock = src_repo.lock()
- except lock.LockException:
- copy = False
+ if copy:
+ try:
+ # we use a lock here because if we race with commit, we
+ # can end up with extra data in the cloned revlogs that's
+ # not pointed to by changesets, thus causing verify to
+ # fail
+ src_lock = src_repo.lock()
+ except lock.LockException:
+ copy = False
- if copy:
- def force_copy(src, dst):
- try:
- util.copyfiles(src, dst)
- except OSError, inst:
- if inst.errno != errno.ENOENT:
- raise
+ if copy:
+ def force_copy(src, dst):
+ try:
+ util.copyfiles(src, dst)
+ except OSError, inst:
+ if inst.errno != errno.ENOENT:
+ raise
- src_store = os.path.realpath(src_repo.spath)
- if not os.path.exists(dest):
- os.mkdir(dest)
- dest_path = os.path.realpath(os.path.join(dest, ".hg"))
- os.mkdir(dest_path)
- if src_repo.spath != src_repo.path:
- dest_store = os.path.join(dest_path, "store")
- os.mkdir(dest_store)
- else:
- dest_store = dest_path
- # copy the requires file
- force_copy(src_repo.join("requires"),
- os.path.join(dest_path, "requires"))
- # we lock here to avoid premature writing to the target
- dest_lock = lock.lock(os.path.join(dest_store, "lock"))
+ src_store = os.path.realpath(src_repo.spath)
+ if not os.path.exists(dest):
+ os.mkdir(dest)
+ dest_path = os.path.realpath(os.path.join(dest, ".hg"))
+ os.mkdir(dest_path)
+ if src_repo.spath != src_repo.path:
+ dest_store = os.path.join(dest_path, "store")
+ os.mkdir(dest_store)
+ else:
+ dest_store = dest_path
+ # copy the requires file
+ force_copy(src_repo.join("requires"),
+ os.path.join(dest_path, "requires"))
+ # we lock here to avoid premature writing to the target
+ dest_lock = lock.lock(os.path.join(dest_store, "lock"))
- files = ("data",
- "00manifest.d", "00manifest.i",
- "00changelog.d", "00changelog.i")
- for f in files:
- src = os.path.join(src_store, f)
- dst = os.path.join(dest_store, f)
- force_copy(src, dst)
+ files = ("data",
+ "00manifest.d", "00manifest.i",
+ "00changelog.d", "00changelog.i")
+ for f in files:
+ src = os.path.join(src_store, f)
+ dst = os.path.join(dest_store, f)
+ force_copy(src, dst)
+
+ # we need to re-init the repo after manually copying the data
+ # into it
+ dest_repo = repository(ui, dest)
+
+ else:
+ dest_repo = repository(ui, dest, create=True)
- # we need to re-init the repo after manually copying the data
- # into it
- dest_repo = repository(ui, dest)
-
- else:
- dest_repo = repository(ui, dest, create=True)
+ revs = None
+ if rev:
+ if 'lookup' not in src_repo.capabilities:
+ raise util.Abort(_("src repository does not support revision "
+ "lookup and so doesn't support clone by "
+ "revision"))
+ revs = [src_repo.lookup(r) for r in rev]
- revs = None
- if rev:
- if 'lookup' not in src_repo.capabilities:
- raise util.Abort(_("src repository does not support revision "
- "lookup and so doesn't support clone by "
- "revision"))
- revs = [src_repo.lookup(r) for r in rev]
+ if dest_repo.local():
+ dest_repo.clone(src_repo, heads=revs, stream=stream)
+ elif src_repo.local():
+ src_repo.push(dest_repo, revs=revs)
+ else:
+ raise util.Abort(_("clone from remote to remote not supported"))
if dest_repo.local():
- dest_repo.clone(src_repo, heads=revs, stream=stream)
- elif src_repo.local():
- src_repo.push(dest_repo, revs=revs)
- else:
- raise util.Abort(_("clone from remote to remote not supported"))
-
- if src_lock:
- src_lock.release()
+ fp = dest_repo.opener("hgrc", "w", text=True)
+ fp.write("[paths]\n")
+ fp.write("default = %s\n" % abspath)
+ fp.close()
- if dest_repo.local():
- fp = dest_repo.opener("hgrc", "w", text=True)
- fp.write("[paths]\n")
- fp.write("default = %s\n" % abspath)
- fp.close()
-
- if dest_lock:
- dest_lock.release()
+ if update:
+ try:
+ checkout = dest_repo.lookup("default")
+ except:
+ checkout = dest_repo.changelog.tip()
+ _update(dest_repo, checkout)
+ if dir_cleanup:
+ dir_cleanup.close()
- if update:
- try:
- checkout = dest_repo.lookup("default")
- except:
- checkout = dest_repo.changelog.tip()
- _update(dest_repo, checkout)
- if dir_cleanup:
- dir_cleanup.close()
-
- return src_repo, dest_repo
+ return src_repo, dest_repo
+ finally:
+ del src_lock, dest_lock, dir_cleanup
def _showstats(repo, stats):
stats = ((stats[0], _("updated")),
@@ -245,7 +237,7 @@
def update(repo, node):
"""update the working directory to node, merging linear changes"""
pl = repo.parents()
- stats = _merge.update(repo, node, False, False, None, None)
+ stats = _merge.update(repo, node, False, False, None)
_showstats(repo, stats)
if stats[3]:
repo.ui.status(_("There are unresolved merges with"
@@ -259,15 +251,15 @@
% (pl[0].rev(), repo.changectx(node).rev()))
return stats[3]
-def clean(repo, node, wlock=None, show_stats=True):
+def clean(repo, node, show_stats=True):
"""forcibly switch the working directory to node, clobbering changes"""
- stats = _merge.update(repo, node, False, True, None, wlock)
+ stats = _merge.update(repo, node, False, True, None)
if show_stats: _showstats(repo, stats)
return stats[3]
-def merge(repo, node, force=None, remind=True, wlock=None):
+def merge(repo, node, force=None, remind=True):
"""branch merge with node, resolving changes"""
- stats = _merge.update(repo, node, True, force, False, wlock)
+ stats = _merge.update(repo, node, True, force, False)
_showstats(repo, stats)
if stats[3]:
pl = repo.parents()
@@ -280,9 +272,9 @@
repo.ui.status(_("(branch merge, don't forget to commit)\n"))
return stats[3]
-def revert(repo, node, choose, wlock):
+def revert(repo, node, choose):
"""revert changes to revision in node without updating dirstate"""
- return _merge.update(repo, node, False, True, choose, wlock)[3]
+ return _merge.update(repo, node, False, True, choose)[3]
def verify(repo):
"""verify the consistency of a repository"""
--- a/mercurial/hgweb/hgweb_mod.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/hgweb/hgweb_mod.py Wed Aug 08 23:00:01 2007 +0200
@@ -64,7 +64,7 @@
class hgweb(object):
def __init__(self, repo, name=None):
- if type(repo) == type(""):
+ if isinstance(repo, str):
self.repo = hg.repository(ui.ui(report_untrusted=False), repo)
else:
self.repo = repo
@@ -787,9 +787,17 @@
style = req.form['style'][0]
mapfile = style_map(self.templatepath, style)
+ proto = req.env.get('wsgi.url_scheme')
+ if proto == 'https':
+ proto = 'https'
+ default_port = "443"
+ else:
+ proto = 'http'
+ default_port = "80"
+
port = req.env["SERVER_PORT"]
- port = port != "80" and (":" + port) or ""
- urlbase = 'http://%s%s' % (req.env['SERVER_NAME'], port)
+ port = port != default_port and (":" + port) or ""
+ urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
staticurl = self.config("web", "staticurl") or req.url + 'static/'
if not staticurl.endswith('/'):
staticurl += '/'
@@ -1063,7 +1071,7 @@
# replayed
ssl_req = self.configbool('web', 'push_ssl', True)
if ssl_req:
- if not req.env.get('HTTPS'):
+ if req.env.get('wsgi.url_scheme') != 'https':
bail(_('ssl required\n'))
return
proto = 'https'
@@ -1160,7 +1168,7 @@
req.write('%d\n' % ret)
req.write(val)
finally:
- lock.release()
+ del lock
except (OSError, IOError), inst:
req.write('0\n')
filename = getattr(inst, 'filename', '')
--- a/mercurial/hgweb/hgwebdir_mod.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/hgweb/hgwebdir_mod.py Wed Aug 08 23:00:01 2007 +0200
@@ -91,8 +91,12 @@
url = req.env['REQUEST_URI'].split('?')[0]
if not url.endswith('/'):
url += '/'
+ pathinfo = req.env.get('PATH_INFO', '').strip('/') + '/'
+ base = url[:len(url) - len(pathinfo)]
+ if not base.endswith('/'):
+ base += '/'
- staticurl = config('web', 'staticurl') or url + 'static/'
+ staticurl = config('web', 'staticurl') or base + 'static/'
if not staticurl.endswith('/'):
staticurl += '/'
@@ -119,7 +123,7 @@
yield {"type" : i[0], "extension": i[1],
"node": nodeid, "url": url}
- def entries(sortcolumn="", descending=False, **map):
+ def entries(sortcolumn="", descending=False, subdir="", **map):
def sessionvars(**map):
fields = []
if req.form.has_key('style'):
@@ -135,6 +139,10 @@
rows = []
parity = paritygen(self.stripecount)
for name, path in self.repos:
+ if not name.startswith(subdir):
+ continue
+ name = name[len(subdir):]
+
u = ui.ui(parentui=parentui)
try:
u.readconfig(os.path.join(path, '.hg', 'hgrc'))
@@ -186,6 +194,25 @@
row['parity'] = parity.next()
yield row
+ def makeindex(req, subdir=""):
+ sortable = ["name", "description", "contact", "lastchange"]
+ sortcolumn, descending = self.repos_sorted
+ if req.form.has_key('sort'):
+ sortcolumn = req.form['sort'][0]
+ descending = sortcolumn.startswith('-')
+ if descending:
+ sortcolumn = sortcolumn[1:]
+ if sortcolumn not in sortable:
+ sortcolumn = ""
+
+ sort = [("sort_%s" % column,
+ "%s%s" % ((not descending and column == sortcolumn)
+ and "-" or "", column))
+ for column in sortable]
+ req.write(tmpl("index", entries=entries, subdir=subdir,
+ sortcolumn=sortcolumn, descending=descending,
+ **dict(sort)))
+
try:
virtual = req.env.get("PATH_INFO", "").strip('/')
if virtual.startswith('static/'):
@@ -194,25 +221,32 @@
req.write(staticfile(static, fname, req) or
tmpl('error', error='%r not found' % fname))
elif virtual:
+ repos = dict(self.repos)
while virtual:
- real = dict(self.repos).get(virtual)
+ real = repos.get(virtual)
if real:
- break
+ req.env['REPO_NAME'] = virtual
+ try:
+ repo = hg.repository(parentui, real)
+ hgweb(repo).run_wsgi(req)
+ except IOError, inst:
+ req.write(tmpl("error", error=inst.strerror))
+ except hg.RepoError, inst:
+ req.write(tmpl("error", error=str(inst)))
+ return
+
+ # browse subdirectories
+ subdir = virtual + '/'
+ if [r for r in repos if r.startswith(subdir)]:
+ makeindex(req, subdir)
+ return
+
up = virtual.rfind('/')
if up < 0:
break
virtual = virtual[:up]
- if real:
- req.env['REPO_NAME'] = virtual
- try:
- repo = hg.repository(parentui, real)
- hgweb(repo).run_wsgi(req)
- except IOError, inst:
- req.write(tmpl("error", error=inst.strerror))
- except hg.RepoError, inst:
- req.write(tmpl("error", error=str(inst)))
- else:
- req.write(tmpl("notfound", repo=virtual))
+
+ req.write(tmpl("notfound", repo=virtual))
else:
if req.form.has_key('static'):
static = os.path.join(templater.templatepath(), "static")
@@ -220,22 +254,6 @@
req.write(staticfile(static, fname, req)
or tmpl("error", error="%r not found" % fname))
else:
- sortable = ["name", "description", "contact", "lastchange"]
- sortcolumn, descending = self.repos_sorted
- if req.form.has_key('sort'):
- sortcolumn = req.form['sort'][0]
- descending = sortcolumn.startswith('-')
- if descending:
- sortcolumn = sortcolumn[1:]
- if sortcolumn not in sortable:
- sortcolumn = ""
-
- sort = [("sort_%s" % column,
- "%s%s" % ((not descending and column == sortcolumn)
- and "-" or "", column))
- for column in sortable]
- req.write(tmpl("index", entries=entries,
- sortcolumn=sortcolumn, descending=descending,
- **dict(sort)))
+ makeindex(req)
finally:
tmpl = None
--- a/mercurial/hgweb/server.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/hgweb/server.py Wed Aug 08 23:00:01 2007 +0200
@@ -37,6 +37,9 @@
self.handler.log_error("HG error: %s", msg)
class _hgwebhandler(object, BaseHTTPServer.BaseHTTPRequestHandler):
+
+ url_scheme = 'http'
+
def __init__(self, *args, **kargs):
self.protocol_version = 'HTTP/1.1'
BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs)
@@ -53,13 +56,16 @@
self.log_date_time_string(),
format % args))
+ def do_write(self):
+ try:
+ self.do_hgweb()
+ except socket.error, inst:
+ if inst[0] != errno.EPIPE:
+ raise
+
def do_POST(self):
try:
- try:
- self.do_hgweb()
- except socket.error, inst:
- if inst[0] != errno.EPIPE:
- raise
+ self.do_write()
except StandardError, inst:
self._start_response("500 Internal Server Error", [])
self._write("Internal Server Error")
@@ -101,7 +107,7 @@
env[hkey] = hval
env['SERVER_PROTOCOL'] = self.request_version
env['wsgi.version'] = (1, 0)
- env['wsgi.url_scheme'] = 'http'
+ env['wsgi.url_scheme'] = self.url_scheme
env['wsgi.input'] = self.rfile
env['wsgi.errors'] = _error_logger(self)
env['wsgi.multithread'] = isinstance(self.server,
@@ -164,6 +170,31 @@
self.wfile.write(data)
self.wfile.flush()
+class _shgwebhandler(_hgwebhandler):
+
+ url_scheme = 'https'
+
+ def setup(self):
+ self.connection = self.request
+ self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
+ self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)
+
+ def do_write(self):
+ from OpenSSL.SSL import SysCallError
+ try:
+ super(_shgwebhandler, self).do_write()
+ except SysCallError, inst:
+ if inst.args[0] != errno.EPIPE:
+ raise
+
+ def handle_one_request(self):
+ from OpenSSL.SSL import SysCallError, ZeroReturnError
+ try:
+ super(_shgwebhandler, self).handle_one_request()
+ except (SysCallError, ZeroReturnError):
+ self.close_connection = True
+ pass
+
def create_server(ui, repo):
use_threads = True
@@ -192,6 +223,7 @@
port = int(getconfig("port", 8000))
use_ipv6 = getconfigbool("ipv6")
webdir_conf = getconfig("webdir_conf")
+ ssl_cert = getconfig("certificate")
accesslog = openlog(getconfig("accesslog", "-"), sys.stdout)
errorlog = openlog(getconfig("errorlog", "-"), sys.stderr)
@@ -238,6 +270,19 @@
self.addr, self.port = addr, port
+ if ssl_cert:
+ try:
+ from OpenSSL import SSL
+ ctx = SSL.Context(SSL.SSLv23_METHOD)
+ except ImportError:
+ raise util.Abort("SSL support is unavailable")
+ ctx.use_privatekey_file(ssl_cert)
+ ctx.use_certificate_file(ssl_cert)
+ sock = socket.socket(self.address_family, self.socket_type)
+ self.socket = SSL.Connection(ctx, sock)
+ self.server_bind()
+ self.server_activate()
+
class IPv6HTTPServer(MercurialHTTPServer):
address_family = getattr(socket, 'AF_INET6', None)
@@ -246,10 +291,15 @@
raise hg.RepoError(_('IPv6 not available on this system'))
super(IPv6HTTPServer, self).__init__(*args, **kwargs)
+ if ssl_cert:
+ handler = _shgwebhandler
+ else:
+ handler = _hgwebhandler
+
try:
if use_ipv6:
- return IPv6HTTPServer((address, port), _hgwebhandler)
+ return IPv6HTTPServer((address, port), handler)
else:
- return MercurialHTTPServer((address, port), _hgwebhandler)
+ return MercurialHTTPServer((address, port), handler)
except socket.error, inst:
raise util.Abort(_('cannot start server: %s') % inst.args[1])
--- a/mercurial/hgweb/wsgicgi.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/hgweb/wsgicgi.py Wed Aug 08 23:00:01 2007 +0200
@@ -23,7 +23,7 @@
environ['wsgi.multiprocess'] = True
environ['wsgi.run_once'] = True
- if environ.get('HTTPS','off') in ('on','1'):
+ if environ.get('HTTPS','off').lower() in ('on','1','yes'):
environ['wsgi.url_scheme'] = 'https'
else:
environ['wsgi.url_scheme'] = 'http'
--- a/mercurial/httprepo.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/httprepo.py Wed Aug 08 23:00:01 2007 +0200
@@ -449,9 +449,6 @@
def instance(ui, path, create):
if create:
raise util.Abort(_('cannot create new http repository'))
- if path.startswith('hg:'):
- ui.warn(_("hg:// syntax is deprecated, please use http:// instead\n"))
- path = 'http:' + path[3:]
if path.startswith('https:'):
return httpsrepository(ui, path)
return httprepository(ui, path)
--- a/mercurial/localrepo.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/localrepo.py Wed Aug 08 23:00:01 2007 +0200
@@ -8,7 +8,7 @@
from node import *
from i18n import _
import repo, changegroup
-import changelog, dirstate, filelog, manifest, context
+import changelog, dirstate, filelog, manifest, context, weakref
import re, lock, transaction, tempfile, stat, mdiff, errno, ui
import os, revlog, time, util, extensions, hook
@@ -16,8 +16,6 @@
capabilities = ('lookup', 'changegroupsubset')
supported = ('revlogv1', 'store')
- def __del__(self):
- self.transhandle = None
def __init__(self, parentui, path=None, create=0):
repo.repository.__init__(self)
self.path = path
@@ -84,7 +82,7 @@
self.branchcache = None
self.nodetagscache = None
self.filterpats = {}
- self.transhandle = None
+ self._transref = self._lockref = self._wlockref = None
def __getattr__(self, name):
if name == 'changelog':
@@ -109,7 +107,8 @@
tag_disallowed = ':\r\n'
- def _tag(self, name, node, message, local, user, date, parent=None):
+ def _tag(self, name, node, message, local, user, date, parent=None,
+ extra={}):
use_dirstate = parent is None
for c in self.tag_disallowed:
@@ -155,10 +154,11 @@
# committed tags are stored in UTF-8
writetag(fp, name, util.fromlocal, prevtags)
- if use_dirstate and self.dirstate.state('.hgtags') == '?':
+ if use_dirstate and '.hgtags' not in self.dirstate:
self.add(['.hgtags'])
- tagnode = self.commit(['.hgtags'], message, user, date, p1=parent)
+ tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
+ extra=extra)
self.hook('tag', node=hex(node), tag=name, local=local)
@@ -394,6 +394,11 @@
n = self.changelog._partialmatch(key)
if n:
return n
+ try:
+ if len(key) == 20:
+ key = hex(key)
+ except:
+ pass
raise repo.RepoError(_("unknown revision '%s'") % key)
def dev(self):
@@ -493,9 +498,8 @@
return self._filter("decode", filename, data)
def transaction(self):
- tr = self.transhandle
- if tr != None and tr.running():
- return tr.nest()
+ if self._transref and self._transref():
+ return self._transref().nest()
# save dirstate for rollback
try:
@@ -509,33 +513,38 @@
tr = transaction.transaction(self.ui.warn, self.sopener,
self.sjoin("journal"),
aftertrans(renames))
- self.transhandle = tr
+ self._transref = weakref.ref(tr)
return tr
def recover(self):
l = self.lock()
- if os.path.exists(self.sjoin("journal")):
- self.ui.status(_("rolling back interrupted transaction\n"))
- transaction.rollback(self.sopener, self.sjoin("journal"))
- self.invalidate()
- return True
- else:
- self.ui.warn(_("no interrupted transaction available\n"))
- return False
+ try:
+ if os.path.exists(self.sjoin("journal")):
+ self.ui.status(_("rolling back interrupted transaction\n"))
+ transaction.rollback(self.sopener, self.sjoin("journal"))
+ self.invalidate()
+ return True
+ else:
+ self.ui.warn(_("no interrupted transaction available\n"))
+ return False
+ finally:
+ del l
- def rollback(self, wlock=None, lock=None):
- if not wlock:
+ def rollback(self):
+ wlock = lock = None
+ try:
wlock = self.wlock()
- if not lock:
lock = self.lock()
- if os.path.exists(self.sjoin("undo")):
- self.ui.status(_("rolling back last transaction\n"))
- transaction.rollback(self.sopener, self.sjoin("undo"))
- util.rename(self.join("undo.dirstate"), self.join("dirstate"))
- self.invalidate()
- self.dirstate.invalidate()
- else:
- self.ui.warn(_("no rollback information available\n"))
+ if os.path.exists(self.sjoin("undo")):
+ self.ui.status(_("rolling back last transaction\n"))
+ transaction.rollback(self.sopener, self.sjoin("undo"))
+ util.rename(self.join("undo.dirstate"), self.join("dirstate"))
+ self.invalidate()
+ self.dirstate.invalidate()
+ else:
+ self.ui.warn(_("no rollback information available\n"))
+ finally:
+ del lock, wlock
def invalidate(self):
for a in "changelog manifest".split():
@@ -544,8 +553,7 @@
self.tagscache = None
self.nodetagscache = None
- def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
- desc=None):
+ def _lock(self, lockname, wait, releasefn, acquirefn, desc):
try:
l = lock.lock(lockname, 0, releasefn, desc=desc)
except lock.LockHeld, inst:
@@ -560,17 +568,26 @@
acquirefn()
return l
- def lock(self, wait=1):
- return self.do_lock(self.sjoin("lock"), wait,
- acquirefn=self.invalidate,
- desc=_('repository %s') % self.origroot)
+ def lock(self, wait=True):
+ if self._lockref and self._lockref():
+ return self._lockref()
+
+ l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
+ _('repository %s') % self.origroot)
+ self._lockref = weakref.ref(l)
+ return l
- def wlock(self, wait=1):
- return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
- self.dirstate.invalidate,
- desc=_('working directory of %s') % self.origroot)
+ def wlock(self, wait=True):
+ if self._wlockref and self._wlockref():
+ return self._wlockref()
- def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
+ l = self._lock(self.join("wlock"), wait, self.dirstate.write,
+ self.dirstate.invalidate, _('working directory of %s') %
+ self.origroot)
+ self._wlockref = weakref.ref(l)
+ return l
+
+ def filecommit(self, fn, manifest1, manifest2, linkrev, tr, changelist):
"""
commit an individual file as part of a larger transaction
"""
@@ -628,173 +645,182 @@
return fp1
changelist.append(fn)
- return fl.add(t, meta, transaction, linkrev, fp1, fp2)
+ return fl.add(t, meta, tr, linkrev, fp1, fp2)
- def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
+ def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
if p1 is None:
p1, p2 = self.dirstate.parents()
return self.commit(files=files, text=text, user=user, date=date,
- p1=p1, p2=p2, wlock=wlock, extra=extra)
+ p1=p1, p2=p2, extra=extra, empty_ok=True)
def commit(self, files=None, text="", user=None, date=None,
- match=util.always, force=False, lock=None, wlock=None,
- force_editor=False, p1=None, p2=None, extra={}):
-
- commit = []
- remove = []
- changed = []
- use_dirstate = (p1 is None) # not rawcommit
- extra = extra.copy()
+ match=util.always, force=False, force_editor=False,
+ p1=None, p2=None, extra={}, empty_ok=False):
+ wlock = lock = tr = None
+ try:
+ commit = []
+ remove = []
+ changed = []
+ use_dirstate = (p1 is None) # not rawcommit
+ extra = extra.copy()
- if use_dirstate:
- if files:
- for f in files:
- s = self.dirstate.state(f)
- if s in 'nmai':
- commit.append(f)
- elif s == 'r':
- remove.append(f)
- else:
- self.ui.warn(_("%s not tracked!\n") % f)
+ if use_dirstate:
+ if files:
+ for f in files:
+ s = self.dirstate[f]
+ if s in 'nma':
+ commit.append(f)
+ elif s == 'r':
+ remove.append(f)
+ else:
+ self.ui.warn(_("%s not tracked!\n") % f)
+ else:
+ changes = self.status(match=match)[:5]
+ modified, added, removed, deleted, unknown = changes
+ commit = modified + added
+ remove = removed
else:
- changes = self.status(match=match)[:5]
- modified, added, removed, deleted, unknown = changes
- commit = modified + added
- remove = removed
- else:
- commit = files
+ commit = files
- if use_dirstate:
- p1, p2 = self.dirstate.parents()
- update_dirstate = True
- else:
- p1, p2 = p1, p2 or nullid
- update_dirstate = (self.dirstate.parents()[0] == p1)
+ if use_dirstate:
+ p1, p2 = self.dirstate.parents()
+ update_dirstate = True
+ else:
+ p1, p2 = p1, p2 or nullid
+ update_dirstate = (self.dirstate.parents()[0] == p1)
- c1 = self.changelog.read(p1)
- c2 = self.changelog.read(p2)
- m1 = self.manifest.read(c1[0]).copy()
- m2 = self.manifest.read(c2[0])
+ c1 = self.changelog.read(p1)
+ c2 = self.changelog.read(p2)
+ m1 = self.manifest.read(c1[0]).copy()
+ m2 = self.manifest.read(c2[0])
- if use_dirstate:
- branchname = self.workingctx().branch()
- try:
- branchname = branchname.decode('UTF-8').encode('UTF-8')
- except UnicodeDecodeError:
- raise util.Abort(_('branch name not in UTF-8!'))
- else:
- branchname = ""
+ if use_dirstate:
+ branchname = self.workingctx().branch()
+ try:
+ branchname = branchname.decode('UTF-8').encode('UTF-8')
+ except UnicodeDecodeError:
+ raise util.Abort(_('branch name not in UTF-8!'))
+ else:
+ branchname = ""
- if use_dirstate:
- oldname = c1[5].get("branch") # stored in UTF-8
- if (not commit and not remove and not force and p2 == nullid
- and branchname == oldname):
- self.ui.status(_("nothing changed\n"))
- return None
+ if use_dirstate:
+ oldname = c1[5].get("branch") # stored in UTF-8
+ if (not commit and not remove and not force and p2 == nullid
+ and branchname == oldname):
+ self.ui.status(_("nothing changed\n"))
+ return None
- xp1 = hex(p1)
- if p2 == nullid: xp2 = ''
- else: xp2 = hex(p2)
-
- self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
+ xp1 = hex(p1)
+ if p2 == nullid: xp2 = ''
+ else: xp2 = hex(p2)
- if not wlock:
+ self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
+
wlock = self.wlock()
- if not lock:
lock = self.lock()
- tr = self.transaction()
+ tr = self.transaction()
+ trp = weakref.proxy(tr)
- # check in files
- new = {}
- linkrev = self.changelog.count()
- commit.sort()
- is_exec = util.execfunc(self.root, m1.execf)
- is_link = util.linkfunc(self.root, m1.linkf)
- for f in commit:
- self.ui.note(f + "\n")
- try:
- new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
- new_exec = is_exec(f)
- new_link = is_link(f)
- if not changed or changed[-1] != f:
- # mention the file in the changelog if some flag changed,
- # even if there was no content change.
- old_exec = m1.execf(f)
- old_link = m1.linkf(f)
- if old_exec != new_exec or old_link != new_link:
- changed.append(f)
- m1.set(f, new_exec, new_link)
- except (OSError, IOError):
- if use_dirstate:
- self.ui.warn(_("trouble committing %s!\n") % f)
- raise
- else:
- remove.append(f)
+ # check in files
+ new = {}
+ linkrev = self.changelog.count()
+ commit.sort()
+ is_exec = util.execfunc(self.root, m1.execf)
+ is_link = util.linkfunc(self.root, m1.linkf)
+ for f in commit:
+ self.ui.note(f + "\n")
+ try:
+ new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
+ new_exec = is_exec(f)
+ new_link = is_link(f)
+ if not changed or changed[-1] != f:
+ # mention the file in the changelog if some
+ # flag changed, even if there was no content
+ # change.
+ old_exec = m1.execf(f)
+ old_link = m1.linkf(f)
+ if old_exec != new_exec or old_link != new_link:
+ changed.append(f)
+ m1.set(f, new_exec, new_link)
+ except (OSError, IOError):
+ if use_dirstate:
+ self.ui.warn(_("trouble committing %s!\n") % f)
+ raise
+ else:
+ remove.append(f)
- # update manifest
- m1.update(new)
- remove.sort()
- removed = []
+ # update manifest
+ m1.update(new)
+ remove.sort()
+ removed = []
- for f in remove:
- if f in m1:
- del m1[f]
- removed.append(f)
- elif f in m2:
- removed.append(f)
- mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
+ for f in remove:
+ if f in m1:
+ del m1[f]
+ removed.append(f)
+ elif f in m2:
+ removed.append(f)
+ mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
+ (new, removed))
- # add changeset
- new = new.keys()
- new.sort()
+ # add changeset
+ new = new.keys()
+ new.sort()
- user = user or self.ui.username()
- if not text or force_editor:
- edittext = []
- if text:
- edittext.append(text)
- edittext.append("")
- edittext.append("HG: user: %s" % user)
- if p2 != nullid:
- edittext.append("HG: branch merge")
+ user = user or self.ui.username()
+ if (not empty_ok and not text) or force_editor:
+ edittext = []
+ if text:
+ edittext.append(text)
+ edittext.append("")
+ edittext.append("HG: user: %s" % user)
+ if p2 != nullid:
+ edittext.append("HG: branch merge")
+ if branchname:
+ edittext.append("HG: branch %s" % util.tolocal(branchname))
+ edittext.extend(["HG: changed %s" % f for f in changed])
+ edittext.extend(["HG: removed %s" % f for f in removed])
+ if not changed and not remove:
+ edittext.append("HG: no files changed")
+ edittext.append("")
+ # run editor in the repository root
+ olddir = os.getcwd()
+ os.chdir(self.root)
+ text = self.ui.edit("\n".join(edittext), user)
+ os.chdir(olddir)
+
if branchname:
- edittext.append("HG: branch %s" % util.tolocal(branchname))
- edittext.extend(["HG: changed %s" % f for f in changed])
- edittext.extend(["HG: removed %s" % f for f in removed])
- if not changed and not remove:
- edittext.append("HG: no files changed")
- edittext.append("")
- # run editor in the repository root
- olddir = os.getcwd()
- os.chdir(self.root)
- text = self.ui.edit("\n".join(edittext), user)
- os.chdir(olddir)
+ extra["branch"] = branchname
- lines = [line.rstrip() for line in text.rstrip().splitlines()]
- while lines and not lines[0]:
- del lines[0]
- if not lines:
- return None
- text = '\n'.join(lines)
- if branchname:
- extra["branch"] = branchname
- n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
- user, date, extra)
- self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
- parent2=xp2)
- tr.close()
+ if use_dirstate:
+ lines = [line.rstrip() for line in text.rstrip().splitlines()]
+ while lines and not lines[0]:
+ del lines[0]
+ if not lines:
+ return None
+ text = '\n'.join(lines)
+
+ n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
+ user, date, extra)
+ self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
+ parent2=xp2)
+ tr.close()
- if self.branchcache and "branch" in extra:
- self.branchcache[util.tolocal(extra["branch"])] = n
+ if self.branchcache and "branch" in extra:
+ self.branchcache[util.tolocal(extra["branch"])] = n
- if use_dirstate or update_dirstate:
- self.dirstate.setparents(n)
- if use_dirstate:
- self.dirstate.update(new, "n")
- self.dirstate.forget(removed)
+ if use_dirstate or update_dirstate:
+ self.dirstate.setparents(n)
+ if use_dirstate:
+ for f in new:
+ self.dirstate.normal(f)
+ for f in removed:
+ self.dirstate.forget(f)
- self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
- return n
+ self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
+ return n
+ finally:
+ del tr, lock, wlock
def walk(self, node=None, files=[], match=util.always, badmatch=None):
'''
@@ -839,7 +865,7 @@
yield src, fn
def status(self, node1=None, node2=None, files=[], match=util.always,
- wlock=None, list_ignored=False, list_clean=False):
+ list_ignored=False, list_clean=False):
"""return status of files between two nodes or node and working directory
If node1 is None, use the first dirstate parent instead.
@@ -871,8 +897,6 @@
# all the revisions in parent->child order.
mf1 = mfmatches(node1)
- mywlock = False
-
# are we comparing the working directory?
if not node2:
(lookup, modified, added, removed, deleted, unknown,
@@ -882,24 +906,30 @@
# are we comparing working dir against its parent?
if compareworking:
if lookup:
+ fixup = []
# do a full compare of any files that might have changed
- mnode = self.changelog.read(self.dirstate.parents()[0])[0]
- getnode = lambda fn: (self.manifest.find(mnode, fn)[0] or
- nullid)
+ ctx = self.changectx()
for f in lookup:
- if fcmp(f, getnode):
+ if f not in ctx or ctx[f].cmp(self.wread(f)):
modified.append(f)
else:
+ fixup.append(f)
if list_clean:
clean.append(f)
- if not wlock and not mywlock:
- mywlock = True
- try:
- wlock = self.wlock(wait=0)
- except lock.LockException:
- pass
+
+ # update dirstate for files that are actually clean
+ if fixup:
+ wlock = None
+ try:
+ try:
+ wlock = self.wlock(False)
+ except lock.LockException:
+ pass
if wlock:
- self.dirstate.update([f], "n")
+ for f in fixup:
+ self.dirstate.normal(f)
+ finally:
+ del wlock
else:
# we are comparing working dir against non-parent
# generate a pseudo-manifest for the working dir
@@ -914,8 +944,6 @@
if f in mf2:
del mf2[f]
- if mywlock and wlock:
- wlock.release()
else:
# we are comparing two revisions
mf2 = mfmatches(node2)
@@ -948,85 +976,98 @@
l.sort()
return (modified, added, removed, deleted, unknown, ignored, clean)
- def add(self, list, wlock=None):
- if not wlock:
- wlock = self.wlock()
- for f in list:
- p = self.wjoin(f)
- try:
- st = os.lstat(p)
- except:
- self.ui.warn(_("%s does not exist!\n") % f)
- continue
- if st.st_size > 10000000:
- self.ui.warn(_("%s: files over 10MB may cause memory and"
- " performance problems\n"
- "(use 'hg revert %s' to unadd the file)\n")
- % (f, f))
- if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
- self.ui.warn(_("%s not added: only files and symlinks "
- "supported currently\n") % f)
- elif self.dirstate.state(f) in 'an':
- self.ui.warn(_("%s already tracked!\n") % f)
- else:
- self.dirstate.update([f], "a")
-
- def forget(self, list, wlock=None):
- if not wlock:
- wlock = self.wlock()
- for f in list:
- if self.dirstate.state(f) not in 'ai':
- self.ui.warn(_("%s not added!\n") % f)
- else:
- self.dirstate.forget([f])
-
- def remove(self, list, unlink=False, wlock=None):
- if unlink:
+ def add(self, list):
+ wlock = self.wlock()
+ try:
for f in list:
+ p = self.wjoin(f)
try:
- util.unlink(self.wjoin(f))
- except OSError, inst:
- if inst.errno != errno.ENOENT:
- raise
- if not wlock:
- wlock = self.wlock()
- for f in list:
- if unlink and os.path.exists(self.wjoin(f)):
- self.ui.warn(_("%s still exists!\n") % f)
- elif self.dirstate.state(f) == 'a':
- self.dirstate.forget([f])
- elif f not in self.dirstate:
- self.ui.warn(_("%s not tracked!\n") % f)
- else:
- self.dirstate.update([f], "r")
+ st = os.lstat(p)
+ except:
+ self.ui.warn(_("%s does not exist!\n") % f)
+ continue
+ if st.st_size > 10000000:
+ self.ui.warn(_("%s: files over 10MB may cause memory and"
+ " performance problems\n"
+ "(use 'hg revert %s' to unadd the file)\n")
+ % (f, f))
+ if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
+ self.ui.warn(_("%s not added: only files and symlinks "
+ "supported currently\n") % f)
+ elif self.dirstate[f] in 'an':
+ self.ui.warn(_("%s already tracked!\n") % f)
+ else:
+ self.dirstate.add(f)
+ finally:
+ del wlock
- def undelete(self, list, wlock=None):
- p = self.dirstate.parents()[0]
- mn = self.changelog.read(p)[0]
- m = self.manifest.read(mn)
- if not wlock:
+ def forget(self, list):
+ wlock = self.wlock()
+ try:
+ for f in list:
+ if self.dirstate[f] != 'a':
+ self.ui.warn(_("%s not added!\n") % f)
+ else:
+ self.dirstate.forget(f)
+ finally:
+ del wlock
+
+ def remove(self, list, unlink=False):
+ wlock = None
+ try:
+ if unlink:
+ for f in list:
+ try:
+ util.unlink(self.wjoin(f))
+ except OSError, inst:
+ if inst.errno != errno.ENOENT:
+ raise
wlock = self.wlock()
- for f in list:
- if self.dirstate.state(f) not in "r":
- self.ui.warn("%s not removed!\n" % f)
- else:
- t = self.file(f).read(m[f])
- self.wwrite(f, t, m.flags(f))
- self.dirstate.update([f], "n")
+ for f in list:
+ if unlink and os.path.exists(self.wjoin(f)):
+ self.ui.warn(_("%s still exists!\n") % f)
+ elif self.dirstate[f] == 'a':
+ self.dirstate.forget(f)
+ elif f not in self.dirstate:
+ self.ui.warn(_("%s not tracked!\n") % f)
+ else:
+ self.dirstate.remove(f)
+ finally:
+ del wlock
- def copy(self, source, dest, wlock=None):
- p = self.wjoin(dest)
- if not (os.path.exists(p) or os.path.islink(p)):
- self.ui.warn(_("%s does not exist!\n") % dest)
- elif not (os.path.isfile(p) or os.path.islink(p)):
- self.ui.warn(_("copy failed: %s is not a file or a "
- "symbolic link\n") % dest)
- else:
- if not wlock:
+ def undelete(self, list):
+ wlock = None
+ try:
+ p = self.dirstate.parents()[0]
+ mn = self.changelog.read(p)[0]
+ m = self.manifest.read(mn)
+ wlock = self.wlock()
+ for f in list:
+ if self.dirstate[f] != 'r':
+ self.ui.warn("%s not removed!\n" % f)
+ else:
+ t = self.file(f).read(m[f])
+ self.wwrite(f, t, m.flags(f))
+ self.dirstate.normal(f)
+ finally:
+ del wlock
+
+ def copy(self, source, dest):
+ wlock = None
+ try:
+ p = self.wjoin(dest)
+ if not (os.path.exists(p) or os.path.islink(p)):
+ self.ui.warn(_("%s does not exist!\n") % dest)
+ elif not (os.path.isfile(p) or os.path.islink(p)):
+ self.ui.warn(_("copy failed: %s is not a file or a "
+ "symbolic link\n") % dest)
+ else:
wlock = self.wlock()
- if self.dirstate.state(dest) == '?':
- self.dirstate.update([dest], "a")
- self.dirstate.copy(source, dest)
+ if dest not in self.dirstate:
+ self.dirstate.add(dest)
+ self.dirstate.copy(source, dest)
+ finally:
+ del wlock
def heads(self, start=None):
heads = self.changelog.heads(start)
@@ -1303,12 +1344,8 @@
else:
return subset
- def pull(self, remote, heads=None, force=False, lock=None):
- mylock = False
- if not lock:
- lock = self.lock()
- mylock = True
-
+ def pull(self, remote, heads=None, force=False):
+ lock = self.lock()
try:
fetch = self.findincoming(remote, force=force)
if fetch == [nullid]:
@@ -1326,8 +1363,7 @@
cg = remote.changegroupsubset(fetch, heads, 'pull')
return self.addchangegroup(cg, 'pull', remote.url())
finally:
- if mylock:
- lock.release()
+ del lock
def push(self, remote, force=False, revs=None):
# there are two ways to push to remote repo:
@@ -1400,12 +1436,14 @@
def push_addchangegroup(self, remote, force, revs):
lock = remote.lock()
-
- ret = self.prepush(remote, force, revs)
- if ret[0] is not None:
- cg, remote_heads = ret
- return remote.addchangegroup(cg, 'push', self.url())
- return ret[1]
+ try:
+ ret = self.prepush(remote, force, revs)
+ if ret[0] is not None:
+ cg, remote_heads = ret
+ return remote.addchangegroup(cg, 'push', self.url())
+ return ret[1]
+ finally:
+ del lock
def push_unbundle(self, remote, force, revs):
# local repo finds heads on server, finds out what revs it
@@ -1789,65 +1827,68 @@
changesets = files = revisions = 0
- tr = self.transaction()
-
# write changelog data to temp files so concurrent readers will not see
# inconsistent view
cl = self.changelog
cl.delayupdate()
oldheads = len(cl.heads())
- # pull off the changeset group
- self.ui.status(_("adding changesets\n"))
- cor = cl.count() - 1
- chunkiter = changegroup.chunkiter(source)
- if cl.addgroup(chunkiter, csmap, tr, 1) is None:
- raise util.Abort(_("received changelog group is empty"))
- cnr = cl.count() - 1
- changesets = cnr - cor
+ tr = self.transaction()
+ try:
+ trp = weakref.proxy(tr)
+ # pull off the changeset group
+ self.ui.status(_("adding changesets\n"))
+ cor = cl.count() - 1
+ chunkiter = changegroup.chunkiter(source)
+ if cl.addgroup(chunkiter, csmap, trp, 1) is None:
+ raise util.Abort(_("received changelog group is empty"))
+ cnr = cl.count() - 1
+ changesets = cnr - cor
- # pull off the manifest group
- self.ui.status(_("adding manifests\n"))
- chunkiter = changegroup.chunkiter(source)
- # no need to check for empty manifest group here:
- # if the result of the merge of 1 and 2 is the same in 3 and 4,
- # no new manifest will be created and the manifest group will
- # be empty during the pull
- self.manifest.addgroup(chunkiter, revmap, tr)
+ # pull off the manifest group
+ self.ui.status(_("adding manifests\n"))
+ chunkiter = changegroup.chunkiter(source)
+ # no need to check for empty manifest group here:
+ # if the result of the merge of 1 and 2 is the same in 3 and 4,
+ # no new manifest will be created and the manifest group will
+ # be empty during the pull
+ self.manifest.addgroup(chunkiter, revmap, trp)
- # process the files
- self.ui.status(_("adding file changes\n"))
- while 1:
- f = changegroup.getchunk(source)
- if not f:
- break
- self.ui.debug(_("adding %s revisions\n") % f)
- fl = self.file(f)
- o = fl.count()
- chunkiter = changegroup.chunkiter(source)
- if fl.addgroup(chunkiter, revmap, tr) is None:
- raise util.Abort(_("received file revlog group is empty"))
- revisions += fl.count() - o
- files += 1
+ # process the files
+ self.ui.status(_("adding file changes\n"))
+ while 1:
+ f = changegroup.getchunk(source)
+ if not f:
+ break
+ self.ui.debug(_("adding %s revisions\n") % f)
+ fl = self.file(f)
+ o = fl.count()
+ chunkiter = changegroup.chunkiter(source)
+ if fl.addgroup(chunkiter, revmap, trp) is None:
+ raise util.Abort(_("received file revlog group is empty"))
+ revisions += fl.count() - o
+ files += 1
+
+ # make changelog see real files again
+ cl.finalize(trp)
- # make changelog see real files again
- cl.finalize(tr)
+ newheads = len(self.changelog.heads())
+ heads = ""
+ if oldheads and newheads != oldheads:
+ heads = _(" (%+d heads)") % (newheads - oldheads)
- newheads = len(self.changelog.heads())
- heads = ""
- if oldheads and newheads != oldheads:
- heads = _(" (%+d heads)") % (newheads - oldheads)
+ self.ui.status(_("added %d changesets"
+ " with %d changes to %d files%s\n")
+ % (changesets, revisions, files, heads))
- self.ui.status(_("added %d changesets"
- " with %d changes to %d files%s\n")
- % (changesets, revisions, files, heads))
+ if changesets > 0:
+ self.hook('pretxnchangegroup', throw=True,
+ node=hex(self.changelog.node(cor+1)), source=srctype,
+ url=url)
- if changesets > 0:
- self.hook('pretxnchangegroup', throw=True,
- node=hex(self.changelog.node(cor+1)), source=srctype,
- url=url)
-
- tr.close()
+ tr.close()
+ finally:
+ del tr
if changesets > 0:
self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
--- a/mercurial/lock.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/lock.py Wed Aug 08 23:00:01 2007 +0200
@@ -29,14 +29,13 @@
# old-style lock: symlink to pid
# new-style lock: symlink to hostname:pid
+ _host = None
+
def __init__(self, file, timeout=-1, releasefn=None, desc=None):
self.f = file
self.held = 0
self.timeout = timeout
self.releasefn = releasefn
- self.id = None
- self.host = None
- self.pid = None
self.desc = desc
self.lock()
@@ -59,13 +58,12 @@
inst.locker)
def trylock(self):
- if self.id is None:
- self.host = socket.gethostname()
- self.pid = os.getpid()
- self.id = '%s:%s' % (self.host, self.pid)
+ if lock._host is None:
+ lock._host = socket.gethostname()
+ lockname = '%s:%s' % (lock._host, os.getpid())
while not self.held:
try:
- util.makelock(self.id, self.f)
+ util.makelock(lockname, self.f)
self.held = 1
except (OSError, IOError), why:
if why.errno == errno.EEXIST:
@@ -93,7 +91,7 @@
host, pid = locker.split(":", 1)
except ValueError:
return locker
- if host != self.host:
+ if host != lock._host:
return locker
try:
pid = int(pid)
--- a/mercurial/manifest.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/manifest.py Wed Aug 08 23:00:01 2007 +0200
@@ -23,10 +23,6 @@
def linkf(self, f):
"test for symlink in manifest flags"
return "l" in self.flags(f)
- def rawset(self, f, entry):
- self[f] = bin(entry[:40])
- fl = entry[40:-1]
- if fl: self._flags[f] = fl
def set(self, f, execf=False, linkf=False):
if linkf: self._flags[f] = "l"
elif execf: self._flags[f] = "x"
@@ -40,16 +36,19 @@
self.listcache = None
revlog.__init__(self, opener, "00manifest.i")
- def parselines(self, lines):
- for l in lines.splitlines(1):
- yield l.split('\0')
+ def parse(self, lines):
+ mfdict = manifestdict()
+ for l in lines.splitlines():
+ f, n = l.split('\0')
+ if len(n) > 40:
+ mfdict._flags[f] = n[40:]
+ mfdict[f] = bin(n[:40])
+ else:
+ mfdict[f] = bin(n)
+ return mfdict
def readdelta(self, node):
- delta = mdiff.patchtext(self.delta(node))
- deltamap = manifestdict()
- for f, n in self.parselines(delta):
- deltamap.rawset(f, n)
- return deltamap
+ return self.parse(mdiff.patchtext(self.delta(node)))
def read(self, node):
if node == nullid: return manifestdict() # don't upset local cache
@@ -57,9 +56,7 @@
return self.mapcache[1]
text = self.revision(node)
self.listcache = array.array('c', text)
- mapping = manifestdict()
- for f, n in self.parselines(text):
- mapping.rawset(f, n)
+ mapping = self.parse(text)
self.mapcache = (node, mapping)
return mapping
--- a/mercurial/merge.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/merge.py Wed Aug 08 23:00:01 2007 +0200
@@ -455,25 +455,25 @@
f, m = a[:2]
if m == "r": # remove
if branchmerge:
- repo.dirstate.update([f], 'r')
+ repo.dirstate.remove(f)
else:
- repo.dirstate.forget([f])
+ repo.dirstate.forget(f)
elif m == "f": # forget
- repo.dirstate.forget([f])
+ repo.dirstate.forget(f)
elif m in "ge": # get or exec change
if branchmerge:
- repo.dirstate.update([f], 'n', st_mtime=-1)
+ repo.dirstate.normaldirty(f)
else:
- repo.dirstate.update([f], 'n')
+ repo.dirstate.normal(f)
elif m == "m": # merge
f2, fd, flag, move = a[2:]
if branchmerge:
# We've done a branch merge, mark this file as merged
# so that we properly record the merger later
- repo.dirstate.update([fd], 'm')
+ repo.dirstate.merge(fd)
if f != f2: # copy/rename
if move:
- repo.dirstate.update([f], 'r')
+ repo.dirstate.remove(f)
if f != fd:
repo.dirstate.copy(f, fd)
else:
@@ -484,95 +484,94 @@
# of that file some time in the past. Thus our
# merge will appear as a normal local file
# modification.
- repo.dirstate.update([fd], 'n', st_size=-1, st_mtime=-1)
+ repo.dirstate.normaldirty(fd)
if move:
- repo.dirstate.forget([f])
+ repo.dirstate.forget(f)
elif m == "d": # directory rename
f2, fd, flag = a[2:]
if not f2 and f not in repo.dirstate:
# untracked file moved
continue
if branchmerge:
- repo.dirstate.update([fd], 'a')
+ repo.dirstate.add(fd)
if f:
- repo.dirstate.update([f], 'r')
+ repo.dirstate.remove(f)
repo.dirstate.copy(f, fd)
if f2:
repo.dirstate.copy(f2, fd)
else:
- repo.dirstate.update([fd], 'n')
+ repo.dirstate.normal(fd)
if f:
- repo.dirstate.forget([f])
+ repo.dirstate.forget(f)
-def update(repo, node, branchmerge, force, partial, wlock):
+def update(repo, node, branchmerge, force, partial):
"""
Perform a merge between the working directory and the given node
branchmerge = whether to merge between branches
force = whether to force branch merging or file overwriting
partial = a function to filter file lists (dirstate not updated)
- wlock = working dir lock, if already held
"""
- if not wlock:
- wlock = repo.wlock()
-
- wc = repo.workingctx()
- if node is None:
- # tip of current branch
- try:
- node = repo.branchtags()[wc.branch()]
- except KeyError:
- raise util.Abort(_("branch %s not found") % wc.branch())
- overwrite = force and not branchmerge
- forcemerge = force and branchmerge
- pl = wc.parents()
- p1, p2 = pl[0], repo.changectx(node)
- pa = p1.ancestor(p2)
- fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
- fastforward = False
+ wlock = repo.wlock()
+ try:
+ wc = repo.workingctx()
+ if node is None:
+ # tip of current branch
+ try:
+ node = repo.branchtags()[wc.branch()]
+ except KeyError:
+ raise util.Abort(_("branch %s not found") % wc.branch())
+ overwrite = force and not branchmerge
+ forcemerge = force and branchmerge
+ pl = wc.parents()
+ p1, p2 = pl[0], repo.changectx(node)
+ pa = p1.ancestor(p2)
+ fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
+ fastforward = False
- ### check phase
- if not overwrite and len(pl) > 1:
- raise util.Abort(_("outstanding uncommitted merges"))
- if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
- if branchmerge:
- if p1.branch() != p2.branch() and pa != p2:
- fastforward = True
- else:
- raise util.Abort(_("there is nothing to merge, just use "
- "'hg update' or look at 'hg heads'"))
- elif not (overwrite or branchmerge):
- raise util.Abort(_("update spans branches, use 'hg merge' "
- "or 'hg update -C' to lose changes"))
- if branchmerge and not forcemerge:
- if wc.files():
- raise util.Abort(_("outstanding uncommitted changes"))
+ ### check phase
+ if not overwrite and len(pl) > 1:
+ raise util.Abort(_("outstanding uncommitted merges"))
+ if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
+ if branchmerge:
+ if p1.branch() != p2.branch() and pa != p2:
+ fastforward = True
+ else:
+ raise util.Abort(_("there is nothing to merge, just use "
+ "'hg update' or look at 'hg heads'"))
+ elif not (overwrite or branchmerge):
+ raise util.Abort(_("update spans branches, use 'hg merge' "
+ "or 'hg update -C' to lose changes"))
+ if branchmerge and not forcemerge:
+ if wc.files():
+ raise util.Abort(_("outstanding uncommitted changes"))
- ### calculate phase
- action = []
- if not force:
- checkunknown(wc, p2)
- if not util.checkfolding(repo.path):
- checkcollision(p2)
- if not branchmerge:
- action += forgetremoved(wc, p2)
- action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
+ ### calculate phase
+ action = []
+ if not force:
+ checkunknown(wc, p2)
+ if not util.checkfolding(repo.path):
+ checkcollision(p2)
+ if not branchmerge:
+ action += forgetremoved(wc, p2)
+ action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
- ### apply phase
- if not branchmerge: # just jump to the new rev
- fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
- if not partial:
- repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
+ ### apply phase
+ if not branchmerge: # just jump to the new rev
+ fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
+ if not partial:
+ repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
- stats = applyupdates(repo, action, wc, p2)
+ stats = applyupdates(repo, action, wc, p2)
- if not partial:
- recordupdates(repo, action, branchmerge)
- repo.dirstate.setparents(fp1, fp2)
- if not branchmerge and not fastforward:
- repo.dirstate.setbranch(p2.branch())
- repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
+ if not partial:
+ recordupdates(repo, action, branchmerge)
+ repo.dirstate.setparents(fp1, fp2)
+ if not branchmerge and not fastforward:
+ repo.dirstate.setbranch(p2.branch())
+ repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
- return stats
-
+ return stats
+ finally:
+ del wlock
--- a/mercurial/node.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/node.py Wed Aug 08 23:00:01 2007 +0200
@@ -12,11 +12,9 @@
nullrev = -1
nullid = "\0" * 20
-def hex(node):
- return binascii.hexlify(node)
-
-def bin(node):
- return binascii.unhexlify(node)
+# This ugly style has a noticeable effect in manifest parsing
+hex = binascii.hexlify
+bin = binascii.unhexlify
def short(node):
return hex(node[:6])
--- a/mercurial/patch.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/patch.py Wed Aug 08 23:00:01 2007 +0200
@@ -1,16 +1,23 @@
# patch.py - patch file parsing routines
#
# Copyright 2006 Brendan Cully <brendan@kublai.com>
+# Copyright 2007 Chris Mason <chris.mason@oracle.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
from i18n import _
from node import *
-import base85, cmdutil, mdiff, util, context, revlog
+import base85, cmdutil, mdiff, util, context, revlog, diffhelpers
import cStringIO, email.Parser, os, popen2, re, sha
import sys, tempfile, zlib
+class PatchError(Exception):
+ pass
+
+class NoHunks(PatchError):
+ pass
+
# helper functions
def copyfile(src, dst, basedir=None):
@@ -50,7 +57,7 @@
try:
msg = email.Parser.Parser().parse(fileobj)
- message = msg['Subject']
+ subject = msg['Subject']
user = msg['From']
# should try to parse msg['Date']
date = None
@@ -58,18 +65,18 @@
branch = None
parents = []
- if message:
- if message.startswith('[PATCH'):
- pend = message.find(']')
+ if subject:
+ if subject.startswith('[PATCH'):
+ pend = subject.find(']')
if pend >= 0:
- message = message[pend+1:].lstrip()
- message = message.replace('\n\t', ' ')
- ui.debug('Subject: %s\n' % message)
+ subject = subject[pend+1:].lstrip()
+ subject = subject.replace('\n\t', ' ')
+ ui.debug('Subject: %s\n' % subject)
if user:
ui.debug('From: %s\n' % user)
diffs_seen = 0
ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
-
+ message = ''
for part in msg.walk():
content_type = part.get_content_type()
ui.debug('Content-Type: %s\n' % content_type)
@@ -84,9 +91,6 @@
ui.debug(_('found patch at byte %d\n') % m.start(0))
diffs_seen += 1
cfp = cStringIO.StringIO()
- if message:
- cfp.write(message)
- cfp.write('\n')
for line in payload[:m.start(0)].splitlines():
if line.startswith('# HG changeset patch'):
ui.debug(_('patch generated by hg export\n'))
@@ -94,6 +98,7 @@
# drop earlier commit message content
cfp.seek(0)
cfp.truncate()
+ subject = None
elif hgpatch:
if line.startswith('# User '):
user = line[7:]
@@ -123,6 +128,8 @@
os.unlink(tmpname)
raise
+ if subject and not message.startswith(subject):
+ message = '%s\n%s' % (subject, message)
tmpfp.close()
if not diffs_seen:
os.unlink(tmpname)
@@ -135,7 +142,7 @@
GP_FILTER = 1 << 1 # there's some copy/rename operation
GP_BINARY = 1 << 2 # there's a binary patch
-def readgitpatch(patchname):
+def readgitpatch(fp, firstline=None):
"""extract git-style metadata about patches from <patchname>"""
class gitpatch:
"op is one of ADD, DELETE, RENAME, MODIFY or COPY"
@@ -148,16 +155,21 @@
self.lineno = 0
self.binary = False
+ def reader(fp, firstline):
+ if firstline is not None:
+ yield firstline
+ for line in fp:
+ yield line
+
# Filter patch for git information
gitre = re.compile('diff --git a/(.*) b/(.*)')
- pf = file(patchname)
gp = None
gitpatches = []
# Can have a git patch with only metadata, causing patch to complain
dopatch = 0
lineno = 0
- for line in pf:
+ for line in reader(fp, firstline):
lineno += 1
if line.startswith('diff --git'):
m = gitre.match(line)
@@ -190,9 +202,9 @@
gp.op = 'DELETE'
elif line.startswith('new file mode '):
gp.op = 'ADD'
- gp.mode = int(line.rstrip()[-3:], 8)
+ gp.mode = int(line.rstrip()[-6:], 8)
elif line.startswith('new mode '):
- gp.mode = int(line.rstrip()[-3:], 8)
+ gp.mode = int(line.rstrip()[-6:], 8)
elif line.startswith('GIT binary patch'):
dopatch |= GP_BINARY
gp.binary = True
@@ -204,157 +216,793 @@
return (dopatch, gitpatches)
-def dogitpatch(patchname, gitpatches, cwd=None):
- """Preprocess git patch so that vanilla patch can handle it"""
- def extractbin(fp):
- i = [0] # yuck
- def readline():
- i[0] += 1
- return fp.readline().rstrip()
- line = readline()
+def patch(patchname, ui, strip=1, cwd=None, files={}):
+ """apply <patchname> to the working directory.
+ returns whether patch was applied with fuzz factor."""
+ patcher = ui.config('ui', 'patch')
+ args = []
+ try:
+ if patcher:
+ return externalpatch(patcher, args, patchname, ui, strip, cwd,
+ files)
+ else:
+ try:
+ return internalpatch(patchname, ui, strip, cwd, files)
+ except NoHunks:
+ patcher = util.find_exe('gpatch') or util.find_exe('patch')
+ ui.debug('no valid hunks found; trying with %r instead\n' %
+ patcher)
+ if util.needbinarypatch():
+ args.append('--binary')
+ return externalpatch(patcher, args, patchname, ui, strip, cwd,
+ files)
+ except PatchError, err:
+ s = str(err)
+ if s:
+ raise util.Abort(s)
+ else:
+ raise util.Abort(_('patch failed to apply'))
+
+def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
+ """use <patcher> to apply <patchname> to the working directory.
+ returns whether patch was applied with fuzz factor."""
+
+ fuzz = False
+ if cwd:
+ args.append('-d %s' % util.shellquote(cwd))
+ fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
+ util.shellquote(patchname)))
+
+ for line in fp:
+ line = line.rstrip()
+ ui.note(line + '\n')
+ if line.startswith('patching file '):
+ pf = util.parse_patch_output(line)
+ printed_file = False
+ files.setdefault(pf, (None, None))
+ elif line.find('with fuzz') >= 0:
+ fuzz = True
+ if not printed_file:
+ ui.warn(pf + '\n')
+ printed_file = True
+ ui.warn(line + '\n')
+ elif line.find('saving rejects to file') >= 0:
+ ui.warn(line + '\n')
+ elif line.find('FAILED') >= 0:
+ if not printed_file:
+ ui.warn(pf + '\n')
+ printed_file = True
+ ui.warn(line + '\n')
+ code = fp.close()
+ if code:
+ raise PatchError(_("patch command failed: %s") %
+ util.explain_exit(code)[0])
+ return fuzz
+
+def internalpatch(patchobj, ui, strip, cwd, files={}):
+ """use builtin patch to apply <patchobj> to the working directory.
+ returns whether patch was applied with fuzz factor."""
+ try:
+ fp = file(patchobj, 'rb')
+ except TypeError:
+ fp = patchobj
+ if cwd:
+ curdir = os.getcwd()
+ os.chdir(cwd)
+ try:
+ ret = applydiff(ui, fp, files, strip=strip)
+ finally:
+ if cwd:
+ os.chdir(curdir)
+ if ret < 0:
+ raise PatchError
+ return ret > 0
+
+# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
+unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
+contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
+
+class patchfile:
+ def __init__(self, ui, fname):
+ self.fname = fname
+ self.ui = ui
+ try:
+ fp = file(fname, 'rb')
+ self.lines = fp.readlines()
+ self.exists = True
+ except IOError:
+ dirname = os.path.dirname(fname)
+ if dirname and not os.path.isdir(dirname):
+ dirs = dirname.split(os.path.sep)
+ d = ""
+ for x in dirs:
+ d = os.path.join(d, x)
+ if not os.path.isdir(d):
+ os.mkdir(d)
+ self.lines = []
+ self.exists = False
+
+ self.hash = {}
+ self.dirty = 0
+ self.offset = 0
+ self.rej = []
+ self.fileprinted = False
+ self.printfile(False)
+ self.hunks = 0
+
+ def printfile(self, warn):
+ if self.fileprinted:
+ return
+ if warn or self.ui.verbose:
+ self.fileprinted = True
+ s = _("patching file %s\n") % self.fname
+ if warn:
+ self.ui.warn(s)
+ else:
+ self.ui.note(s)
+
+
+ def findlines(self, l, linenum):
+ # looks through the hash and finds candidate lines. The
+ # result is a list of line numbers sorted based on distance
+ # from linenum
+ def sorter(a, b):
+ vala = abs(a - linenum)
+ valb = abs(b - linenum)
+ return cmp(vala, valb)
+
+ try:
+ cand = self.hash[l]
+ except:
+ return []
+
+ if len(cand) > 1:
+ # resort our list of potentials forward then back.
+ cand.sort(cmp=sorter)
+ return cand
+
+ def hashlines(self):
+ self.hash = {}
+ for x in xrange(len(self.lines)):
+ s = self.lines[x]
+ self.hash.setdefault(s, []).append(x)
+
+ def write_rej(self):
+ # our rejects are a little different from patch(1). This always
+ # creates rejects in the same form as the original patch. A file
+ # header is inserted so that you can run the reject through patch again
+ # without having to type the filename.
+
+ if not self.rej:
+ return
+ if self.hunks != 1:
+ hunkstr = "s"
+ else:
+ hunkstr = ""
+
+ fname = self.fname + ".rej"
+ self.ui.warn(
+ _("%d out of %d hunk%s FAILED -- saving rejects to file %s\n") %
+ (len(self.rej), self.hunks, hunkstr, fname))
+ try: os.unlink(fname)
+ except:
+ pass
+ fp = file(fname, 'wb')
+ base = os.path.basename(self.fname)
+ fp.write("--- %s\n+++ %s\n" % (base, base))
+ for x in self.rej:
+ for l in x.hunk:
+ fp.write(l)
+ if l[-1] != '\n':
+ fp.write("\n\ No newline at end of file\n")
+
+ def write(self, dest=None):
+ if self.dirty:
+ if not dest:
+ dest = self.fname
+ st = None
+ try:
+ st = os.lstat(dest)
+ if st.st_nlink > 1:
+ os.unlink(dest)
+ except: pass
+ fp = file(dest, 'wb')
+ if st:
+ os.chmod(dest, st.st_mode)
+ fp.writelines(self.lines)
+ fp.close()
+
+ def close(self):
+ self.write()
+ self.write_rej()
+
+ def apply(self, h, reverse):
+ if not h.complete():
+ raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
+ (h.number, h.desc, len(h.a), h.lena, len(h.b),
+ h.lenb))
+
+ self.hunks += 1
+ if reverse:
+ h.reverse()
+
+ if self.exists and h.createfile():
+ self.ui.warn(_("file %s already exists\n") % self.fname)
+ self.rej.append(h)
+ return -1
+
+ if isinstance(h, binhunk):
+ if h.rmfile():
+ os.unlink(self.fname)
+ else:
+ self.lines[:] = h.new()
+ self.offset += len(h.new())
+ self.dirty = 1
+ return 0
+
+ # fast case first, no offsets, no fuzz
+ old = h.old()
+ # patch starts counting at 1 unless we are adding the file
+ if h.starta == 0:
+ start = 0
+ else:
+ start = h.starta + self.offset - 1
+ orig_start = start
+ if diffhelpers.testhunk(old, self.lines, start) == 0:
+ if h.rmfile():
+ os.unlink(self.fname)
+ else:
+ self.lines[start : start + h.lena] = h.new()
+ self.offset += h.lenb - h.lena
+ self.dirty = 1
+ return 0
+
+ # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
+ self.hashlines()
+ if h.hunk[-1][0] != ' ':
+ # if the hunk tried to put something at the bottom of the file
+ # override the start line and use eof here
+ search_start = len(self.lines)
+ else:
+ search_start = orig_start
+
+ for fuzzlen in xrange(3):
+ for toponly in [ True, False ]:
+ old = h.old(fuzzlen, toponly)
+
+ cand = self.findlines(old[0][1:], search_start)
+ for l in cand:
+ if diffhelpers.testhunk(old, self.lines, l) == 0:
+ newlines = h.new(fuzzlen, toponly)
+ self.lines[l : l + len(old)] = newlines
+ self.offset += len(newlines) - len(old)
+ self.dirty = 1
+ if fuzzlen:
+ fuzzstr = "with fuzz %d " % fuzzlen
+ f = self.ui.warn
+ self.printfile(True)
+ else:
+ fuzzstr = ""
+ f = self.ui.note
+ offset = l - orig_start - fuzzlen
+ if offset == 1:
+ linestr = "line"
+ else:
+ linestr = "lines"
+ f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
+ (h.number, l+1, fuzzstr, offset, linestr))
+ return fuzzlen
+ self.printfile(True)
+ self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
+ self.rej.append(h)
+ return -1
+
+class hunk:
+ def __init__(self, desc, num, lr, context):
+ self.number = num
+ self.desc = desc
+ self.hunk = [ desc ]
+ self.a = []
+ self.b = []
+ if context:
+ self.read_context_hunk(lr)
+ else:
+ self.read_unified_hunk(lr)
+
+ def read_unified_hunk(self, lr):
+ m = unidesc.match(self.desc)
+ if not m:
+ raise PatchError(_("bad hunk #%d") % self.number)
+ self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
+ if self.lena == None:
+ self.lena = 1
+ else:
+ self.lena = int(self.lena)
+ if self.lenb == None:
+ self.lenb = 1
+ else:
+ self.lenb = int(self.lenb)
+ self.starta = int(self.starta)
+ self.startb = int(self.startb)
+ diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
+ # if we hit eof before finishing out the hunk, the last line will
+ # be zero length. Lets try to fix it up.
+ while len(self.hunk[-1]) == 0:
+ del self.hunk[-1]
+ del self.a[-1]
+ del self.b[-1]
+ self.lena -= 1
+ self.lenb -= 1
+
+ def read_context_hunk(self, lr):
+ self.desc = lr.readline()
+ m = contextdesc.match(self.desc)
+ if not m:
+ raise PatchError(_("bad hunk #%d") % self.number)
+ foo, self.starta, foo2, aend, foo3 = m.groups()
+ self.starta = int(self.starta)
+ if aend == None:
+ aend = self.starta
+ self.lena = int(aend) - self.starta
+ if self.starta:
+ self.lena += 1
+ for x in xrange(self.lena):
+ l = lr.readline()
+ if l.startswith('---'):
+ lr.push(l)
+ break
+ s = l[2:]
+ if l.startswith('- ') or l.startswith('! '):
+ u = '-' + s
+ elif l.startswith(' '):
+ u = ' ' + s
+ else:
+ raise PatchError(_("bad hunk #%d old text line %d") %
+ (self.number, x))
+ self.a.append(u)
+ self.hunk.append(u)
+
+ l = lr.readline()
+ if l.startswith('\ '):
+ s = self.a[-1][:-1]
+ self.a[-1] = s
+ self.hunk[-1] = s
+ l = lr.readline()
+ m = contextdesc.match(l)
+ if not m:
+ raise PatchError(_("bad hunk #%d") % self.number)
+ foo, self.startb, foo2, bend, foo3 = m.groups()
+ self.startb = int(self.startb)
+ if bend == None:
+ bend = self.startb
+ self.lenb = int(bend) - self.startb
+ if self.startb:
+ self.lenb += 1
+ hunki = 1
+ for x in xrange(self.lenb):
+ l = lr.readline()
+ if l.startswith('\ '):
+ s = self.b[-1][:-1]
+ self.b[-1] = s
+ self.hunk[hunki-1] = s
+ continue
+ if not l:
+ lr.push(l)
+ break
+ s = l[2:]
+ if l.startswith('+ ') or l.startswith('! '):
+ u = '+' + s
+ elif l.startswith(' '):
+ u = ' ' + s
+ elif len(self.b) == 0:
+ # this can happen when the hunk does not add any lines
+ lr.push(l)
+ break
+ else:
+ raise PatchError(_("bad hunk #%d old text line %d") %
+ (self.number, x))
+ self.b.append(s)
+ while True:
+ if hunki >= len(self.hunk):
+ h = ""
+ else:
+ h = self.hunk[hunki]
+ hunki += 1
+ if h == u:
+ break
+ elif h.startswith('-'):
+ continue
+ else:
+ self.hunk.insert(hunki-1, u)
+ break
+
+ if not self.a:
+ # this happens when lines were only added to the hunk
+ for x in self.hunk:
+ if x.startswith('-') or x.startswith(' '):
+ self.a.append(x)
+ if not self.b:
+ # this happens when lines were only deleted from the hunk
+ for x in self.hunk:
+ if x.startswith('+') or x.startswith(' '):
+ self.b.append(x[1:])
+ # @@ -start,len +start,len @@
+ self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
+ self.startb, self.lenb)
+ self.hunk[0] = self.desc
+
+ def reverse(self):
+ origlena = self.lena
+ origstarta = self.starta
+ self.lena = self.lenb
+ self.starta = self.startb
+ self.lenb = origlena
+ self.startb = origstarta
+ self.a = []
+ self.b = []
+ # self.hunk[0] is the @@ description
+ for x in xrange(1, len(self.hunk)):
+ o = self.hunk[x]
+ if o.startswith('-'):
+ n = '+' + o[1:]
+ self.b.append(o[1:])
+ elif o.startswith('+'):
+ n = '-' + o[1:]
+ self.a.append(n)
+ else:
+ n = o
+ self.b.append(o[1:])
+ self.a.append(o)
+ self.hunk[x] = n
+
+ def fix_newline(self):
+ diffhelpers.fix_newline(self.hunk, self.a, self.b)
+
+ def complete(self):
+ return len(self.a) == self.lena and len(self.b) == self.lenb
+
+ def createfile(self):
+ return self.starta == 0 and self.lena == 0
+
+ def rmfile(self):
+ return self.startb == 0 and self.lenb == 0
+
+ def fuzzit(self, l, fuzz, toponly):
+ # this removes context lines from the top and bottom of list 'l'. It
+ # checks the hunk to make sure only context lines are removed, and then
+ # returns a new shortened list of lines.
+ fuzz = min(fuzz, len(l)-1)
+ if fuzz:
+ top = 0
+ bot = 0
+ hlen = len(self.hunk)
+ for x in xrange(hlen-1):
+ # the hunk starts with the @@ line, so use x+1
+ if self.hunk[x+1][0] == ' ':
+ top += 1
+ else:
+ break
+ if not toponly:
+ for x in xrange(hlen-1):
+ if self.hunk[hlen-bot-1][0] == ' ':
+ bot += 1
+ else:
+ break
+
+ # top and bot now count context in the hunk
+ # adjust them if either one is short
+ context = max(top, bot, 3)
+ if bot < context:
+ bot = max(0, fuzz - (context - bot))
+ else:
+ bot = min(fuzz, bot)
+ if top < context:
+ top = max(0, fuzz - (context - top))
+ else:
+ top = min(fuzz, top)
+
+ return l[top:len(l)-bot]
+ return l
+
+ def old(self, fuzz=0, toponly=False):
+ return self.fuzzit(self.a, fuzz, toponly)
+
+ def newctrl(self):
+ res = []
+ for x in self.hunk:
+ c = x[0]
+ if c == ' ' or c == '+':
+ res.append(x)
+ return res
+
+ def new(self, fuzz=0, toponly=False):
+ return self.fuzzit(self.b, fuzz, toponly)
+
+class binhunk:
+ 'A binary patch file. Only understands literals so far.'
+ def __init__(self, gitpatch):
+ self.gitpatch = gitpatch
+ self.text = None
+ self.hunk = ['GIT binary patch\n']
+
+ def createfile(self):
+ return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
+
+ def rmfile(self):
+ return self.gitpatch.op == 'DELETE'
+
+ def complete(self):
+ return self.text is not None
+
+ def new(self):
+ return [self.text]
+
+ def extract(self, fp):
+ line = fp.readline()
+ self.hunk.append(line)
while line and not line.startswith('literal '):
- line = readline()
+ line = fp.readline()
+ self.hunk.append(line)
if not line:
- return None, i[0]
- size = int(line[8:])
+ raise PatchError(_('could not extract binary patch'))
+ size = int(line[8:].rstrip())
dec = []
- line = readline()
- while line:
+ line = fp.readline()
+ self.hunk.append(line)
+ while len(line) > 1:
l = line[0]
if l <= 'Z' and l >= 'A':
l = ord(l) - ord('A') + 1
else:
l = ord(l) - ord('a') + 27
- dec.append(base85.b85decode(line[1:])[:l])
- line = readline()
+ dec.append(base85.b85decode(line[1:-1])[:l])
+ line = fp.readline()
+ self.hunk.append(line)
text = zlib.decompress(''.join(dec))
if len(text) != size:
- raise util.Abort(_('binary patch is %d bytes, not %d') %
- (len(text), size))
- return text, i[0]
+ raise PatchError(_('binary patch is %d bytes, not %d') %
+ (len(text), size))
+ self.text = text
- pf = file(patchname)
- pfline = 1
-
- fd, patchname = tempfile.mkstemp(prefix='hg-patch-')
- tmpfp = os.fdopen(fd, 'w')
+def parsefilename(str):
+ # --- filename \t|space stuff
+ s = str[4:]
+ i = s.find('\t')
+ if i < 0:
+ i = s.find(' ')
+ if i < 0:
+ return s
+ return s[:i]
- try:
- for i in xrange(len(gitpatches)):
- p = gitpatches[i]
- if not p.copymod and not p.binary:
- continue
-
- # rewrite patch hunk
- while pfline < p.lineno:
- tmpfp.write(pf.readline())
- pfline += 1
+def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
+ def pathstrip(path, count=1):
+ pathlen = len(path)
+ i = 0
+ if count == 0:
+ return path.rstrip()
+ while count > 0:
+ i = path.find('/', i)
+ if i == -1:
+ raise PatchError(_("unable to strip away %d dirs from %s") %
+ (count, path))
+ i += 1
+ # consume '//' in the path
+ while i < pathlen - 1 and path[i] == '/':
+ i += 1
+ count -= 1
+ return path[i:].rstrip()
- if p.binary:
- text, delta = extractbin(pf)
- if not text:
- raise util.Abort(_('binary patch extraction failed'))
- pfline += delta
- if not cwd:
- cwd = os.getcwd()
- absdst = os.path.join(cwd, p.path)
- basedir = os.path.dirname(absdst)
- if not os.path.isdir(basedir):
- os.makedirs(basedir)
- out = file(absdst, 'wb')
- out.write(text)
- out.close()
- elif p.copymod:
- copyfile(p.oldpath, p.path, basedir=cwd)
- tmpfp.write('diff --git a/%s b/%s\n' % (p.path, p.path))
- line = pf.readline()
- pfline += 1
- while not line.startswith('--- a/'):
- tmpfp.write(line)
- line = pf.readline()
- pfline += 1
- tmpfp.write('--- a/%s\n' % p.path)
+ nulla = afile_orig == "/dev/null"
+ nullb = bfile_orig == "/dev/null"
+ afile = pathstrip(afile_orig, strip)
+ gooda = os.path.exists(afile) and not nulla
+ bfile = pathstrip(bfile_orig, strip)
+ if afile == bfile:
+ goodb = gooda
+ else:
+ goodb = os.path.exists(bfile) and not nullb
+ createfunc = hunk.createfile
+ if reverse:
+ createfunc = hunk.rmfile
+ if not goodb and not gooda and not createfunc():
+ raise PatchError(_("unable to find %s or %s for patching") %
+ (afile, bfile))
+ if gooda and goodb:
+ fname = bfile
+ if afile in bfile:
+ fname = afile
+ elif gooda:
+ fname = afile
+ elif not nullb:
+ fname = bfile
+ if afile in bfile:
+ fname = afile
+ elif not nulla:
+ fname = afile
+ return fname
+
+class linereader:
+ # simple class to allow pushing lines back into the input stream
+ def __init__(self, fp):
+ self.fp = fp
+ self.buf = []
+
+ def push(self, line):
+ self.buf.append(line)
- line = pf.readline()
- while line:
- tmpfp.write(line)
- line = pf.readline()
- except:
- tmpfp.close()
- os.unlink(patchname)
- raise
+ def readline(self):
+ if self.buf:
+ l = self.buf[0]
+ del self.buf[0]
+ return l
+ return self.fp.readline()
+
+def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
+ rejmerge=None, updatedir=None):
+ """reads a patch from fp and tries to apply it. The dict 'changed' is
+ filled in with all of the filenames changed by the patch. Returns 0
+ for a clean patch, -1 if any rejects were found and 1 if there was
+ any fuzz."""
+
+ def scangitpatch(fp, firstline, cwd=None):
+ '''git patches can modify a file, then copy that file to
+ a new file, but expect the source to be the unmodified form.
+ So we scan the patch looking for that case so we can do
+ the copies ahead of time.'''
- tmpfp.close()
- return patchname
+ pos = 0
+ try:
+ pos = fp.tell()
+ except IOError:
+ fp = cStringIO.StringIO(fp.read())
+
+ (dopatch, gitpatches) = readgitpatch(fp, firstline)
+ for gp in gitpatches:
+ if gp.copymod:
+ copyfile(gp.oldpath, gp.path, basedir=cwd)
+
+ fp.seek(pos)
-def patch(patchname, ui, strip=1, cwd=None, files={}):
- """apply the patch <patchname> to the working directory.
- a list of patched files is returned"""
+ return fp, dopatch, gitpatches
+
+ current_hunk = None
+ current_file = None
+ afile = ""
+ bfile = ""
+ state = None
+ hunknum = 0
+ rejects = 0
+
+ git = False
+ gitre = re.compile('diff --git (a/.*) (b/.*)')
- # helper function
- def __patch(patchname):
- """patch and updates the files and fuzz variables"""
- fuzz = False
-
- args = []
- patcher = ui.config('ui', 'patch')
- if not patcher:
- patcher = util.find_exe('gpatch') or util.find_exe('patch')
- # Try to be smart only if patch call was not supplied
- if util.needbinarypatch():
- args.append('--binary')
-
- if not patcher:
- raise util.Abort(_('no patch command found in hgrc or PATH'))
-
- if cwd:
- args.append('-d %s' % util.shellquote(cwd))
- fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
- util.shellquote(patchname)))
+ # our states
+ BFILE = 1
+ err = 0
+ context = None
+ lr = linereader(fp)
+ dopatch = True
+ gitworkdone = False
- for line in fp:
- line = line.rstrip()
- ui.note(line + '\n')
- if line.startswith('patching file '):
- pf = util.parse_patch_output(line)
- printed_file = False
- files.setdefault(pf, (None, None))
- elif line.find('with fuzz') >= 0:
- fuzz = True
- if not printed_file:
- ui.warn(pf + '\n')
- printed_file = True
- ui.warn(line + '\n')
- elif line.find('saving rejects to file') >= 0:
- ui.warn(line + '\n')
- elif line.find('FAILED') >= 0:
- if not printed_file:
- ui.warn(pf + '\n')
- printed_file = True
- ui.warn(line + '\n')
- code = fp.close()
- if code:
- raise util.Abort(_("patch command failed: %s") %
- util.explain_exit(code)[0])
- return fuzz
+ while True:
+ newfile = False
+ x = lr.readline()
+ if not x:
+ break
+ if current_hunk:
+ if x.startswith('\ '):
+ current_hunk.fix_newline()
+ ret = current_file.apply(current_hunk, reverse)
+ if ret >= 0:
+ changed.setdefault(current_file.fname, (None, None))
+ if ret > 0:
+ err = 1
+ current_hunk = None
+ gitworkdone = False
+ if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
+ ((context or context == None) and x.startswith('***************')))):
+ try:
+ if context == None and x.startswith('***************'):
+ context = True
+ current_hunk = hunk(x, hunknum + 1, lr, context)
+ except PatchError, inst:
+ ui.debug(str(inst))
+ current_hunk = None
+ continue
+ hunknum += 1
+ if not current_file:
+ if sourcefile:
+ current_file = patchfile(ui, sourcefile)
+ else:
+ current_file = selectfile(afile, bfile, current_hunk,
+ strip, reverse)
+ current_file = patchfile(ui, current_file)
+ elif state == BFILE and x.startswith('GIT binary patch'):
+ current_hunk = binhunk(changed[bfile[2:]][1])
+ if not current_file:
+ if sourcefile:
+ current_file = patchfile(ui, sourcefile)
+ else:
+ current_file = selectfile(afile, bfile, current_hunk,
+ strip, reverse)
+ current_file = patchfile(ui, current_file)
+ hunknum += 1
+ current_hunk.extract(fp)
+ elif x.startswith('diff --git'):
+ # check for git diff, scanning the whole patch file if needed
+ m = gitre.match(x)
+ if m:
+ afile, bfile = m.group(1, 2)
+ if not git:
+ git = True
+ fp, dopatch, gitpatches = scangitpatch(fp, x)
+ for gp in gitpatches:
+ changed[gp.path] = (gp.op, gp)
+ # else error?
+ # copy/rename + modify should modify target, not source
+ if changed.get(bfile[2:], (None, None))[0] in ('COPY',
+ 'RENAME'):
+ afile = bfile
+ gitworkdone = True
+ newfile = True
+ elif x.startswith('---'):
+ # check for a unified diff
+ l2 = lr.readline()
+ if not l2.startswith('+++'):
+ lr.push(l2)
+ continue
+ newfile = True
+ context = False
+ afile = parsefilename(x)
+ bfile = parsefilename(l2)
+ elif x.startswith('***'):
+ # check for a context diff
+ l2 = lr.readline()
+ if not l2.startswith('---'):
+ lr.push(l2)
+ continue
+ l3 = lr.readline()
+ lr.push(l3)
+ if not l3.startswith("***************"):
+ lr.push(l2)
+ continue
+ newfile = True
+ context = True
+ afile = parsefilename(x)
+ bfile = parsefilename(l2)
- (dopatch, gitpatches) = readgitpatch(patchname)
- for gp in gitpatches:
- files[gp.path] = (gp.op, gp)
-
- fuzz = False
- if dopatch:
- filterpatch = dopatch & (GP_FILTER | GP_BINARY)
- if filterpatch:
- patchname = dogitpatch(patchname, gitpatches, cwd=cwd)
- try:
- if dopatch & GP_PATCH:
- fuzz = __patch(patchname)
- finally:
- if filterpatch:
- os.unlink(patchname)
-
- return fuzz
+ if newfile:
+ if current_file:
+ current_file.close()
+ if rejmerge:
+ rejmerge(current_file)
+ rejects += len(current_file.rej)
+ state = BFILE
+ current_file = None
+ hunknum = 0
+ if current_hunk:
+ if current_hunk.complete():
+ ret = current_file.apply(current_hunk, reverse)
+ if ret >= 0:
+ changed.setdefault(current_file.fname, (None, None))
+ if ret > 0:
+ err = 1
+ else:
+ fname = current_file and current_file.fname or None
+ raise PatchError(_("malformed patch %s %s") % (fname,
+ current_hunk.desc))
+ if current_file:
+ current_file.close()
+ if rejmerge:
+ rejmerge(current_file)
+ rejects += len(current_file.rej)
+ if updatedir and git:
+ updatedir(gitpatches)
+ if rejects:
+ return -1
+ if hunknum == 0 and dopatch and not gitworkdone:
+ raise NoHunks
+ return err
def diffopts(ui, opts={}, untrusted=False):
def get(key, name=None):
@@ -369,7 +1017,7 @@
ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'))
-def updatedir(ui, repo, patches, wlock=None):
+def updatedir(ui, repo, patches):
'''Update dirstate after patch application according to metadata'''
if not patches:
return
@@ -391,29 +1039,32 @@
for src, dst, after in copies:
if not after:
copyfile(src, dst, repo.root)
- repo.copy(src, dst, wlock=wlock)
+ repo.copy(src, dst)
removes = removes.keys()
if removes:
removes.sort()
- repo.remove(removes, True, wlock=wlock)
+ repo.remove(removes, True)
for f in patches:
ctype, gp = patches[f]
if gp and gp.mode:
x = gp.mode & 0100 != 0
+ l = gp.mode & 020000 != 0
dst = os.path.join(repo.root, gp.path)
# patch won't create empty files
if ctype == 'ADD' and not os.path.exists(dst):
repo.wwrite(gp.path, '', x and 'x' or '')
else:
- util.set_exec(dst, x)
- cmdutil.addremove(repo, cfiles, wlock=wlock)
+ util.set_link(dst, l)
+ if not l:
+ util.set_exec(dst, x)
+ cmdutil.addremove(repo, cfiles)
files = patches.keys()
files.extend([r for r in removes if r not in files])
files.sort()
return files
-def b85diff(fp, to, tn):
+def b85diff(to, tn):
'''print base85-encoded binary diff'''
def gitindex(text):
if not text:
@@ -497,11 +1148,15 @@
if node2:
ctx2 = context.changectx(repo, node2)
execf2 = ctx2.manifest().execf
+ linkf2 = ctx2.manifest().linkf
else:
ctx2 = context.workingctx(repo)
execf2 = util.execfunc(repo.root, None)
+ linkf2 = util.linkfunc(repo.root, None)
if execf2 is None:
- execf2 = ctx2.parents()[0].manifest().copy().execf
+ mc = ctx2.parents()[0].manifest().copy()
+ execf2 = mc.execf
+ linkf2 = mc.linkf
# returns False if there was no rename between ctx1 and ctx2
# returns None if the file was created between ctx1 and ctx2
@@ -558,8 +1213,8 @@
if f not in removed:
tn = getfilectx(f, ctx2).data()
if opts.git:
- def gitmode(x):
- return x and '100755' or '100644'
+ def gitmode(x, l):
+ return l and '120000' or (x and '100755' or '100644')
def addmodehdr(header, omode, nmode):
if omode != nmode:
header.append('old mode %s\n' % omode)
@@ -567,10 +1222,10 @@
a, b = f, f
if f in added:
- mode = gitmode(execf2(f))
+ mode = gitmode(execf2(f), linkf2(f))
if f in copied:
a = copied[f]
- omode = gitmode(man1.execf(a))
+ omode = gitmode(man1.execf(a), man1.linkf(a))
addmodehdr(header, omode, mode)
if a in removed and a not in gone:
op = 'rename'
@@ -588,11 +1243,11 @@
if f in srcs:
dodiff = False
else:
- mode = gitmode(man1.execf(f))
+ mode = gitmode(man1.execf(f), man1.linkf(f))
header.append('deleted file mode %s\n' % mode)
else:
- omode = gitmode(man1.execf(f))
- nmode = gitmode(execf2(f))
+ omode = gitmode(man1.execf(f), man1.linkf(f))
+ nmode = gitmode(execf2(f), linkf2(f))
addmodehdr(header, omode, nmode)
if util.binary(to) or util.binary(tn):
dodiff = 'binary'
@@ -600,7 +1255,7 @@
header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
if dodiff:
if dodiff == 'binary':
- text = b85diff(fp, to, tn)
+ text = b85diff(to, tn)
else:
text = mdiff.unidiff(to, date1,
# ctx2 date may be dynamic
--- a/mercurial/revlog.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/revlog.py Wed Aug 08 23:00:01 2007 +0200
@@ -15,17 +15,34 @@
import binascii, changegroup, errno, ancestor, mdiff, os
import sha, struct, util, zlib
-# revlog version strings
-REVLOGV0 = 0
-REVLOGNG = 1
+_pack = struct.pack
+_unpack = struct.unpack
+_compress = zlib.compress
+_decompress = zlib.decompress
+_sha = sha.new
# revlog flags
+REVLOGV0 = 0
+REVLOGNG = 1
REVLOGNGINLINEDATA = (1 << 16)
REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
-
REVLOG_DEFAULT_FORMAT = REVLOGNG
REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
+class RevlogError(Exception):
+ pass
+class LookupError(RevlogError):
+ pass
+
+def getoffset(q):
+ return int(q >> 16)
+
+def gettype(q):
+ return int(q & 0xFFFF)
+
+def offset_type(offset, type):
+ return long(long(offset) << 16 | type)
+
def hash(text, p1, p2):
"""generate a hash from the given text and its parent hashes
@@ -35,48 +52,39 @@
"""
l = [p1, p2]
l.sort()
- s = sha.new(l[0])
+ s = _sha(l[0])
s.update(l[1])
s.update(text)
return s.digest()
def compress(text):
""" generate a possibly-compressed representation of text """
- if not text: return ("", text)
+ if not text:
+ return ("", text)
if len(text) < 44:
- if text[0] == '\0': return ("", text)
+ if text[0] == '\0':
+ return ("", text)
return ('u', text)
- bin = zlib.compress(text)
+ bin = _compress(text)
if len(bin) > len(text):
- if text[0] == '\0': return ("", text)
+ if text[0] == '\0':
+ return ("", text)
return ('u', text)
return ("", bin)
def decompress(bin):
""" decompress the given input """
- if not bin: return bin
+ if not bin:
+ return bin
t = bin[0]
- if t == '\0': return bin
- if t == 'x': return zlib.decompress(bin)
- if t == 'u': return bin[1:]
+ if t == '\0':
+ return bin
+ if t == 'x':
+ return _decompress(bin)
+ if t == 'u':
+ return bin[1:]
raise RevlogError(_("unknown compression type %r") % t)
-indexformatv0 = ">4l20s20s20s"
-v0shaoffset = 56
-# index ng:
-# 6 bytes offset
-# 2 bytes flags
-# 4 bytes compressed length
-# 4 bytes uncompressed length
-# 4 bytes: base rev
-# 4 bytes link rev
-# 4 bytes parent 1 rev
-# 4 bytes parent 2 rev
-# 32 bytes: nodeid
-indexformatng = ">Qiiiiii20s12x"
-ngshaoffset = 32
-versionformat = ">I"
-
class lazyparser(object):
"""
this class avoids the need to parse the entirety of large indices
@@ -88,11 +96,9 @@
safe_to_use = os.name != 'nt' or (not util.is_win_9x() and
hasattr(util, 'win32api'))
- def __init__(self, dataf, size, indexformat, shaoffset):
+ def __init__(self, dataf, size):
self.dataf = dataf
- self.format = indexformat
- self.s = struct.calcsize(indexformat)
- self.indexformat = indexformat
+ self.s = struct.calcsize(indexformatng)
self.datasize = size
self.l = size/self.s
self.index = [None] * self.l
@@ -100,7 +106,6 @@
self.allmap = 0
self.all = 0
self.mapfind_count = 0
- self.shaoffset = shaoffset
def loadmap(self):
"""
@@ -109,7 +114,8 @@
which is fairly slow. loadmap can load up just the node map,
which takes much less time.
"""
- if self.allmap: return
+ if self.allmap:
+ return
end = self.datasize
self.allmap = 1
cur = 0
@@ -120,7 +126,7 @@
data = self.dataf.read(blocksize)
off = 0
for x in xrange(256):
- n = data[off + self.shaoffset:off + self.shaoffset + 20]
+ n = data[off + ngshaoffset:off + ngshaoffset + 20]
self.map[n] = count
count += 1
if count >= self.l:
@@ -129,7 +135,8 @@
cur += blocksize
def loadblock(self, blockstart, blocksize, data=None):
- if self.all: return
+ if self.all:
+ return
if data is None:
self.dataf.seek(blockstart)
if blockstart + blocksize > self.datasize:
@@ -148,13 +155,14 @@
if self.index[i + x] == None:
b = data[off : off + self.s]
self.index[i + x] = b
- n = b[self.shaoffset:self.shaoffset + 20]
+ n = b[ngshaoffset:ngshaoffset + 20]
self.map[n] = i + x
off += self.s
def findnode(self, node):
"""search backwards through the index file for a specific node"""
- if self.allmap: return None
+ if self.allmap:
+ return None
# hg log will cause many many searches for the manifest
# nodes. After we get called a few times, just load the whole
@@ -180,14 +188,14 @@
data = self.dataf.read(end - start)
findend = end - start
while True:
- # we're searching backwards, so weh have to make sure
+ # we're searching backwards, so we have to make sure
# we don't find a changeset where this node is a parent
- off = data.rfind(node, 0, findend)
+ off = data.find(node, 0, findend)
findend = off
if off >= 0:
i = off / self.s
off = i * self.s
- n = data[off + self.shaoffset:off + self.shaoffset + 20]
+ n = data[off + ngshaoffset:off + ngshaoffset + 20]
if n == node:
self.map[n] = i + start / self.s
return node
@@ -197,11 +205,12 @@
return None
def loadindex(self, i=None, end=None):
- if self.all: return
+ if self.all:
+ return
all = False
if i == None:
blockstart = 0
- blocksize = (512 / self.s) * self.s
+ blocksize = (65536 / self.s) * self.s
end = self.datasize
all = True
else:
@@ -210,13 +219,14 @@
end = end * self.s
blocksize = end - blockstart
else:
- blockstart = (i & ~63) * self.s
- blocksize = self.s * 64
+ blockstart = (i & ~1023) * self.s
+ blocksize = self.s * 1024
end = blockstart + blocksize
while blockstart < end:
self.loadblock(blockstart, blocksize)
blockstart += blocksize
- if all: self.all = True
+ if all:
+ self.all = True
class lazyindex(object):
"""a lazy version of the index array"""
@@ -230,16 +240,15 @@
self.p.loadindex(pos)
return self.p.index[pos]
def __getitem__(self, pos):
- ret = self.p.index[pos] or self.load(pos)
- if isinstance(ret, str):
- ret = struct.unpack(self.p.indexformat, ret)
- return ret
+ return _unpack(indexformatng, self.p.index[pos] or self.load(pos))
def __setitem__(self, pos, item):
- self.p.index[pos] = item
+ self.p.index[pos] = _pack(indexformatng, *item)
def __delitem__(self, pos):
del self.p.index[pos]
+ def insert(self, pos, e):
+ self.p.index.insert(pos, _pack(indexformatng, *e))
def append(self, e):
- self.p.index.append(e)
+ self.p.index.append(_pack(indexformatng, *e))
class lazymap(object):
"""a lazy version of the node map"""
@@ -262,8 +271,8 @@
self.p.loadindex(i)
ret = self.p.index[i]
if isinstance(ret, str):
- ret = struct.unpack(self.p.indexformat, ret)
- yield ret[-1]
+ ret = _unpack(indexformatng, ret)
+ yield ret[7]
def __getitem__(self, key):
try:
return self.p.map[key]
@@ -278,8 +287,112 @@
def __delitem__(self, key):
del self.p.map[key]
-class RevlogError(Exception): pass
-class LookupError(RevlogError): pass
+indexformatv0 = ">4l20s20s20s"
+v0shaoffset = 56
+
+class revlogoldio(object):
+ def __init__(self):
+ self.size = struct.calcsize(indexformatv0)
+
+ def parseindex(self, fp, inline):
+ s = self.size
+ index = []
+ nodemap = {nullid: nullrev}
+ n = off = 0
+ data = fp.read()
+ l = len(data)
+ while off + s <= l:
+ cur = data[off:off + s]
+ off += s
+ e = _unpack(indexformatv0, cur)
+ # transform to revlogv1 format
+ e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
+ nodemap[e[4]], nodemap[e[5]], e[6])
+ index.append(e2)
+ nodemap[e[6]] = n
+ n += 1
+
+ return index, nodemap, None
+
+ def packentry(self, entry, node, version):
+ e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
+ node(entry[5]), node(entry[6]), entry[7])
+ return _pack(indexformatv0, *e2)
+
+# index ng:
+# 6 bytes offset
+# 2 bytes flags
+# 4 bytes compressed length
+# 4 bytes uncompressed length
+# 4 bytes: base rev
+# 4 bytes link rev
+# 4 bytes parent 1 rev
+# 4 bytes parent 2 rev
+# 32 bytes: nodeid
+indexformatng = ">Qiiiiii20s12x"
+ngshaoffset = 32
+versionformat = ">I"
+
+class revlogio(object):
+ def __init__(self):
+ self.size = struct.calcsize(indexformatng)
+
+ def parseindex(self, fp, inline):
+ try:
+ size = util.fstat(fp).st_size
+ except AttributeError:
+ size = 0
+
+ if lazyparser.safe_to_use and not inline and size > 1000000:
+ # big index, let's parse it on demand
+ parser = lazyparser(fp, size)
+ index = lazyindex(parser)
+ nodemap = lazymap(parser)
+ e = list(index[0])
+ type = gettype(e[0])
+ e[0] = offset_type(0, type)
+ index[0] = e
+ return index, nodemap, None
+
+ s = self.size
+ cache = None
+ index = []
+ nodemap = {nullid: nullrev}
+ n = off = 0
+ # if we're not using lazymap, always read the whole index
+ data = fp.read()
+ l = len(data) - s
+ append = index.append
+ if inline:
+ cache = (0, data)
+ while off <= l:
+ e = _unpack(indexformatng, data[off:off + s])
+ nodemap[e[7]] = n
+ append(e)
+ n += 1
+ if e[1] < 0:
+ break
+ off += e[1] + s
+ else:
+ while off <= l:
+ e = _unpack(indexformatng, data[off:off + s])
+ nodemap[e[7]] = n
+ append(e)
+ n += 1
+ off += s
+
+ e = list(index[0])
+ type = gettype(e[0])
+ e[0] = offset_type(0, type)
+ index[0] = e
+
+ return index, nodemap, cache
+
+ def packentry(self, entry, node, version):
+ p = _pack(indexformatng, *entry)
+ if not entry[3] and not getoffset(entry[0]) and entry[5] == nullrev:
+ p = _pack(versionformat, version) + p[4:]
+ return p
class revlog(object):
"""
@@ -316,200 +429,101 @@
self.indexfile = indexfile
self.datafile = indexfile[:-2] + ".d"
self.opener = opener
+ self._cache = None
+ self._chunkcache = None
+ self.nodemap = {nullid: nullrev}
+ self.index = []
- self.indexstat = None
- self.cache = None
- self.chunkcache = None
- self.defversion = REVLOG_DEFAULT_VERSION
+ v = REVLOG_DEFAULT_VERSION
if hasattr(opener, "defversion"):
- self.defversion = opener.defversion
- if self.defversion & REVLOGNG:
- self.defversion |= REVLOGNGINLINEDATA
- self.load()
+ v = opener.defversion
+ if v & REVLOGNG:
+ v |= REVLOGNGINLINEDATA
- def load(self):
- v = self.defversion
+ i = ""
try:
f = self.opener(self.indexfile)
i = f.read(4)
f.seek(0)
+ if len(i) > 0:
+ v = struct.unpack(versionformat, i)[0]
except IOError, inst:
if inst.errno != errno.ENOENT:
raise
- i = ""
- else:
- try:
- st = util.fstat(f)
- except AttributeError, inst:
- st = None
- else:
- oldst = self.indexstat
- if (oldst and st.st_dev == oldst.st_dev
- and st.st_ino == oldst.st_ino
- and st.st_mtime == oldst.st_mtime
- and st.st_ctime == oldst.st_ctime
- and st.st_size == oldst.st_size):
- return
- self.indexstat = st
- if len(i) > 0:
- v = struct.unpack(versionformat, i)[0]
+
+ self.version = v
+ self._inline = v & REVLOGNGINLINEDATA
flags = v & ~0xFFFF
fmt = v & 0xFFFF
- if fmt == REVLOGV0:
- if flags:
- raise RevlogError(_("index %s unknown flags %#04x for format v0")
- % (self.indexfile, flags >> 16))
- elif fmt == REVLOGNG:
- if flags & ~REVLOGNGINLINEDATA:
- raise RevlogError(_("index %s unknown flags %#04x for revlogng")
- % (self.indexfile, flags >> 16))
- else:
+ if fmt == REVLOGV0 and flags:
+ raise RevlogError(_("index %s unknown flags %#04x for format v0")
+ % (self.indexfile, flags >> 16))
+ elif fmt == REVLOGNG and flags & ~REVLOGNGINLINEDATA:
+ raise RevlogError(_("index %s unknown flags %#04x for revlogng")
+ % (self.indexfile, flags >> 16))
+ elif fmt > REVLOGNG:
raise RevlogError(_("index %s unknown format %d")
% (self.indexfile, fmt))
- self.version = v
- if v == REVLOGV0:
- self.indexformat = indexformatv0
- shaoffset = v0shaoffset
- else:
- self.indexformat = indexformatng
- shaoffset = ngshaoffset
-
- if i:
- if (lazyparser.safe_to_use and not self.inlinedata() and
- st and st.st_size > 10000):
- # big index, let's parse it on demand
- parser = lazyparser(f, st.st_size, self.indexformat, shaoffset)
- self.index = lazyindex(parser)
- self.nodemap = lazymap(parser)
- else:
- self.parseindex(f, st)
- if self.version != REVLOGV0:
- e = list(self.index[0])
- type = self.ngtype(e[0])
- e[0] = self.offset_type(0, type)
- self.index[0] = e
- else:
- self.nodemap = {nullid: nullrev}
- self.index = []
-
- def parseindex(self, fp, st):
- s = struct.calcsize(self.indexformat)
- self.index = []
- self.nodemap = {nullid: nullrev}
- inline = self.inlinedata()
- n = 0
- leftover = None
- while True:
- if st:
- data = fp.read(65536)
- else:
- # hack for httprangereader, it doesn't do partial reads well
- data = fp.read()
- if not data:
- break
- if n == 0 and self.inlinedata():
- # cache the first chunk
- self.chunkcache = (0, data)
- if leftover:
- data = leftover + data
- leftover = None
- off = 0
- l = len(data)
- while off < l:
- if l - off < s:
- leftover = data[off:]
- break
- cur = data[off:off + s]
- off += s
- e = struct.unpack(self.indexformat, cur)
- self.index.append(e)
- self.nodemap[e[-1]] = n
- n += 1
- if inline:
- if e[1] < 0:
- break
- off += e[1]
- if off > l:
- # some things don't seek well, just read it
- fp.read(off - l)
- break
- if not st:
- break
+ self._io = revlogio()
+ if self.version == REVLOGV0:
+ self._io = revlogoldio()
+ if i:
+ d = self._io.parseindex(f, self._inline)
+ self.index, self.nodemap, self._chunkcache = d
-
- def ngoffset(self, q):
- if q & 0xFFFF:
- raise RevlogError(_('%s: incompatible revision flag %x') %
- (self.indexfile, q))
- return long(q >> 16)
+ # add the magic null revision at -1
+ self.index.append((0, 0, 0, -1, -1, -1, -1, nullid))
- def ngtype(self, q):
- return int(q & 0xFFFF)
-
- def offset_type(self, offset, type):
- return long(long(offset) << 16 | type)
-
- def loadindex(self, start, end):
+ def _loadindex(self, start, end):
"""load a block of indexes all at once from the lazy parser"""
if isinstance(self.index, lazyindex):
self.index.p.loadindex(start, end)
- def loadindexmap(self):
+ def _loadindexmap(self):
"""loads both the map and the index from the lazy parser"""
if isinstance(self.index, lazyindex):
p = self.index.p
p.loadindex()
self.nodemap = p.map
- def loadmap(self):
+ def _loadmap(self):
"""loads the map from the lazy parser"""
if isinstance(self.nodemap, lazymap):
self.nodemap.p.loadmap()
self.nodemap = self.nodemap.p.map
- def inlinedata(self): return self.version & REVLOGNGINLINEDATA
- def tip(self): return self.node(len(self.index) - 1)
- def count(self): return len(self.index)
- def node(self, rev):
- return rev == nullrev and nullid or self.index[rev][-1]
+ def tip(self):
+ return self.node(len(self.index) - 2)
+ def count(self):
+ return len(self.index) - 1
+
def rev(self, node):
try:
return self.nodemap[node]
except KeyError:
raise LookupError(_('%s: no node %s') % (self.indexfile, hex(node)))
+ def node(self, rev):
+ return self.index[rev][7]
def linkrev(self, node):
- return (node == nullid) and nullrev or self.index[self.rev(node)][-4]
+ return self.index[self.rev(node)][4]
def parents(self, node):
- if node == nullid: return (nullid, nullid)
- r = self.rev(node)
- d = self.index[r][-3:-1]
- if self.version == REVLOGV0:
- return d
+ d = self.index[self.rev(node)][5:7]
return (self.node(d[0]), self.node(d[1]))
def parentrevs(self, rev):
- if rev == nullrev:
- return (nullrev, nullrev)
- d = self.index[rev][-3:-1]
- if self.version == REVLOGV0:
- return (self.rev(d[0]), self.rev(d[1]))
- return d
+ return self.index[rev][5:7]
def start(self, rev):
- if rev == nullrev:
- return 0
- if self.version != REVLOGV0:
- return self.ngoffset(self.index[rev][0])
- return self.index[rev][0]
-
- def end(self, rev): return self.start(rev) + self.length(rev)
+ return int(self.index[rev][0] >> 16)
+ def end(self, rev):
+ return self.start(rev) + self.length(rev)
+ def length(self, rev):
+ return self.index[rev][1]
+ def base(self, rev):
+ return self.index[rev][3]
def size(self, rev):
"""return the length of the uncompressed text for a given revision"""
- if rev == nullrev:
- return 0
- l = -1
- if self.version != REVLOGV0:
- l = self.index[rev][2]
+ l = self.index[rev][2]
if l >= 0:
return l
@@ -536,17 +550,6 @@
return l
"""
- def length(self, rev):
- if rev == nullrev:
- return 0
- else:
- return self.index[rev][1]
- def base(self, rev):
- if (rev == nullrev):
- return nullrev
- else:
- return self.index[rev][-5]
-
def reachable(self, node, stop=None):
"""return a hash of all nodes ancestral to a given node, including
the node itself, stopping when stop is matched"""
@@ -730,6 +733,17 @@
if stop is specified, it will consider all the revs from stop
as if they had no children
"""
+ if start is None and stop is None:
+ count = self.count()
+ if not count:
+ return [nullid]
+ ishead = [1] * (count + 1)
+ index = self.index
+ for r in xrange(count):
+ e = index[r]
+ ishead[e[5]] = ishead[e[6]] = 0
+ return [self.node(r) for r in xrange(count) if ishead[r]]
+
if start is None:
start = nullid
if stop is None:
@@ -781,9 +795,12 @@
try:
# str(rev)
rev = int(id)
- if str(rev) != id: raise ValueError
- if rev < 0: rev = self.count() + rev
- if rev < 0 or rev >= self.count(): raise ValueError
+ if str(rev) != id:
+ raise ValueError
+ if rev < 0:
+ rev = self.count() + rev
+ if rev < 0 or rev >= self.count():
+ raise ValueError
return self.node(rev)
except (ValueError, OverflowError):
pass
@@ -817,7 +834,6 @@
- revision number or str(revision number)
- nodeid or subset of hex nodeid
"""
-
n = self._match(id)
if n is not None:
return n
@@ -832,56 +848,42 @@
p1, p2 = self.parents(node)
return hash(text, p1, p2) != node
- def makenode(self, node, text):
- """calculate a file nodeid for text, descended or possibly
- unchanged from node"""
-
- if self.cmp(node, text):
- return hash(text, node, nullid)
- return node
-
- def diff(self, a, b):
- """return a delta between two revisions"""
- return mdiff.textdiff(a, b)
-
- def patches(self, t, pl):
- """apply a list of patches to a string"""
- return mdiff.patches(t, pl)
-
- def chunk(self, rev, df=None, cachelen=4096):
- start, length = self.start(rev), self.length(rev)
- inline = self.inlinedata()
- if inline:
- start += (rev + 1) * struct.calcsize(self.indexformat)
- end = start + length
+ def chunk(self, rev, df=None):
def loadcache(df):
- cache_length = max(cachelen, length) # 4k
if not df:
- if inline:
+ if self._inline:
df = self.opener(self.indexfile)
else:
df = self.opener(self.datafile)
df.seek(start)
- self.chunkcache = (start, df.read(cache_length))
+ self._chunkcache = (start, df.read(cache_length))
- if not self.chunkcache:
- loadcache(df)
+ start, length = self.start(rev), self.length(rev)
+ if self._inline:
+ start += (rev + 1) * self._io.size
+ end = start + length
- cache_start = self.chunkcache[0]
- cache_end = cache_start + len(self.chunkcache[1])
- if start >= cache_start and end <= cache_end:
- # it is cached
- offset = start - cache_start
- else:
+ offset = 0
+ if not self._chunkcache:
+ cache_length = max(65536, length)
loadcache(df)
- offset = 0
+ else:
+ cache_start = self._chunkcache[0]
+ cache_length = len(self._chunkcache[1])
+ cache_end = cache_start + cache_length
+ if start >= cache_start and end <= cache_end:
+ # it is cached
+ offset = start - cache_start
+ else:
+ cache_length = max(65536, length)
+ loadcache(df)
- #def checkchunk():
- # df = self.opener(self.datafile)
- # df.seek(start)
- # return df.read(length)
- #assert s == checkchunk()
- return decompress(self.chunkcache[1][offset:offset + length])
+ # avoid copying large chunks
+ c = self._chunkcache[1]
+ if cache_length != length:
+ c = c[offset:offset + length]
+
+ return decompress(c)
def delta(self, node):
"""return or calculate a delta between a node and its predecessor"""
@@ -890,55 +892,55 @@
def revdiff(self, rev1, rev2):
"""return or calculate a delta between two revisions"""
- b1 = self.base(rev1)
- b2 = self.base(rev2)
- if b1 == b2 and rev1 + 1 == rev2:
+ if rev1 + 1 == rev2 and self.base(rev1) == self.base(rev2):
return self.chunk(rev2)
- else:
- return self.diff(self.revision(self.node(rev1)),
- self.revision(self.node(rev2)))
+
+ return mdiff.textdiff(self.revision(self.node(rev1)),
+ self.revision(self.node(rev2)))
def revision(self, node):
"""return an uncompressed revision of a given"""
- if node == nullid: return ""
- if self.cache and self.cache[0] == node: return self.cache[2]
+ if node == nullid:
+ return ""
+ if self._cache and self._cache[0] == node:
+ return self._cache[2]
# look up what we need to read
text = None
rev = self.rev(node)
base = self.base(rev)
- if self.inlinedata():
+ # check rev flags
+ if self.index[rev][0] & 0xFFFF:
+ raise RevlogError(_('incompatible revision flag %x') % q)
+
+ if self._inline:
# we probably have the whole chunk cached
df = None
else:
df = self.opener(self.datafile)
# do we have useful data cached?
- if self.cache and self.cache[1] >= base and self.cache[1] < rev:
- base = self.cache[1]
- text = self.cache[2]
- self.loadindex(base, rev + 1)
+ if self._cache and self._cache[1] >= base and self._cache[1] < rev:
+ base = self._cache[1]
+ text = self._cache[2]
+ self._loadindex(base, rev + 1)
else:
- self.loadindex(base, rev + 1)
+ self._loadindex(base, rev + 1)
text = self.chunk(base, df=df)
- bins = []
- for r in xrange(base + 1, rev + 1):
- bins.append(self.chunk(r, df=df))
-
- text = self.patches(text, bins)
-
+ bins = [self.chunk(r, df) for r in xrange(base + 1, rev + 1)]
+ text = mdiff.patches(text, bins)
p1, p2 = self.parents(node)
if node != hash(text, p1, p2):
raise RevlogError(_("integrity check failed on %s:%d")
% (self.datafile, rev))
- self.cache = (node, rev, text)
+ self._cache = (node, rev, text)
return text
def checkinlinesize(self, tr, fp=None):
- if not self.inlinedata():
+ if not self._inline:
return
if not fp:
fp = self.opener(self.indexfile, 'r')
@@ -956,7 +958,7 @@
tr.add(self.datafile, dataoff)
df = self.opener(self.datafile, 'w')
- calc = struct.calcsize(self.indexformat)
+ calc = self._io.size
for r in xrange(self.count()):
start = self.start(r) + (r + 1) * calc
length = self.length(r)
@@ -967,16 +969,9 @@
df.close()
fp = self.opener(self.indexfile, 'w', atomictemp=True)
self.version &= ~(REVLOGNGINLINEDATA)
- if self.count():
- x = self.index[0]
- e = struct.pack(self.indexformat, *x)[4:]
- l = struct.pack(versionformat, self.version)
- fp.write(l)
- fp.write(e)
-
- for i in xrange(1, self.count()):
- x = self.index[i]
- e = struct.pack(self.indexformat, *x)
+ self._inline = False
+ for i in xrange(self.count()):
+ e = self._io.packentry(self.index[i], self.node, self.version)
fp.write(e)
# if we don't call rename, the temp file will never replace the
@@ -984,9 +979,9 @@
fp.rename()
tr.replace(self.indexfile, trindex * calc)
- self.chunkcache = None
+ self._chunkcache = None
- def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
+ def addrevision(self, text, transaction, link, p1, p2, d=None):
"""add a revision to the log
text - the revision data to add
@@ -995,84 +990,60 @@
p1, p2 - the parent nodeids of the revision
d - an optional precomputed delta
"""
- if not self.inlinedata():
+ dfh = None
+ if not self._inline:
dfh = self.opener(self.datafile, "a")
- else:
- dfh = None
ifh = self.opener(self.indexfile, "a+")
return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
- if text is None: text = ""
- if p1 is None: p1 = self.tip()
- if p2 is None: p2 = nullid
-
node = hash(text, p1, p2)
-
if node in self.nodemap:
return node
- n = self.count()
- t = n - 1
+ curr = self.count()
+ prev = curr - 1
+ base = self.base(prev)
+ offset = self.end(prev)
- if n:
- base = self.base(t)
- start = self.start(base)
- end = self.end(t)
+ if curr:
if not d:
- prev = self.revision(self.tip())
- d = self.diff(prev, text)
+ ptext = self.revision(self.node(prev))
+ d = mdiff.textdiff(ptext, text)
data = compress(d)
l = len(data[1]) + len(data[0])
- dist = end - start + l
+ dist = l + offset - self.start(base)
# full versions are inserted when the needed deltas
# become comparable to the uncompressed text
- if not n or dist > len(text) * 2:
+ if not curr or dist > len(text) * 2:
data = compress(text)
l = len(data[1]) + len(data[0])
- base = n
- else:
- base = self.base(t)
-
- offset = 0
- if t >= 0:
- offset = self.end(t)
+ base = curr
- if self.version == REVLOGV0:
- e = (offset, l, base, link, p1, p2, node)
- else:
- e = (self.offset_type(offset, 0), l, len(text),
- base, link, self.rev(p1), self.rev(p2), node)
+ e = (offset_type(offset, 0), l, len(text),
+ base, link, self.rev(p1), self.rev(p2), node)
+ self.index.insert(-1, e)
+ self.nodemap[node] = curr
- self.index.append(e)
- self.nodemap[node] = n
- entry = struct.pack(self.indexformat, *e)
-
- if not self.inlinedata():
+ entry = self._io.packentry(e, self.node, self.version)
+ if not self._inline:
transaction.add(self.datafile, offset)
- transaction.add(self.indexfile, n * len(entry))
+ transaction.add(self.indexfile, curr * len(entry))
if data[0]:
dfh.write(data[0])
dfh.write(data[1])
dfh.flush()
+ ifh.write(entry)
else:
- ifh.seek(0, 2)
- transaction.add(self.indexfile, ifh.tell(), self.count() - 1)
-
- if len(self.index) == 1 and self.version != REVLOGV0:
- l = struct.pack(versionformat, self.version)
- ifh.write(l)
- entry = entry[4:]
-
- ifh.write(entry)
-
- if self.inlinedata():
+ offset += curr * self._io.size
+ transaction.add(self.indexfile, offset, prev)
+ ifh.write(entry)
ifh.write(data[0])
ifh.write(data[1])
self.checkinlinesize(transaction, ifh)
- self.cache = (node, n, text)
+ self._cache = (node, curr, text)
return node
def ancestor(self, a, b):
@@ -1142,11 +1113,12 @@
end = self.end(t)
ifh = self.opener(self.indexfile, "a+")
- ifh.seek(0, 2)
- transaction.add(self.indexfile, ifh.tell(), self.count())
- if self.inlinedata():
+ isize = r * self._io.size
+ if self._inline:
+ transaction.add(self.indexfile, end + isize, r)
dfh = None
else:
+ transaction.add(self.indexfile, isize, r)
transaction.add(self.datafile, end)
dfh = self.opener(self.datafile, "a")
@@ -1190,10 +1162,10 @@
dfh.flush()
ifh.flush()
text = self.revision(chain)
- text = self.patches(text, [delta])
+ text = mdiff.patches(text, [delta])
chk = self._addrevision(text, transaction, link, p1, p2, None,
ifh, dfh)
- if not dfh and not self.inlinedata():
+ if not dfh and not self._inline:
# addrevision switched from inline to conventional
# reopen the index
dfh = self.opener(self.datafile, "a")
@@ -1202,23 +1174,21 @@
raise RevlogError(_("consistency error adding group"))
textlen = len(text)
else:
- if self.version == REVLOGV0:
- e = (end, len(cdelta), base, link, p1, p2, node)
- else:
- e = (self.offset_type(end, 0), len(cdelta), textlen, base,
- link, self.rev(p1), self.rev(p2), node)
- self.index.append(e)
+ e = (offset_type(end, 0), len(cdelta), textlen, base,
+ link, self.rev(p1), self.rev(p2), node)
+ self.index.insert(-1, e)
self.nodemap[node] = r
- if self.inlinedata():
- ifh.write(struct.pack(self.indexformat, *e))
+ entry = self._io.packentry(e, self.node, self.version)
+ if self._inline:
+ ifh.write(entry)
ifh.write(cdelta)
self.checkinlinesize(transaction, ifh)
- if not self.inlinedata():
+ if not self._inline:
dfh = self.opener(self.datafile, "a")
ifh = self.opener(self.indexfile, "a")
else:
dfh.write(cdelta)
- ifh.write(struct.pack(self.indexformat, *e))
+ ifh.write(entry)
t, r, chain, prev = r, r + 1, node, node
base = self.base(t)
@@ -1232,36 +1202,36 @@
return
if isinstance(self.index, lazyindex):
- self.loadindexmap()
+ self._loadindexmap()
# When stripping away a revision, we need to make sure it
# does not actually belong to an older changeset.
# The minlink parameter defines the oldest revision
# we're allowed to strip away.
- while minlink > self.index[rev][-4]:
+ while minlink > self.index[rev][4]:
rev += 1
if rev >= self.count():
return
# first truncate the files on disk
end = self.start(rev)
- if not self.inlinedata():
+ if not self._inline:
df = self.opener(self.datafile, "a")
df.truncate(end)
- end = rev * struct.calcsize(self.indexformat)
+ end = rev * self._io.size
else:
- end += rev * struct.calcsize(self.indexformat)
+ end += rev * self._io.size
indexf = self.opener(self.indexfile, "a")
indexf.truncate(end)
# then reset internal state in memory to forget those revisions
- self.cache = None
- self.chunkcache = None
+ self._cache = None
+ self._chunkcache = None
for x in xrange(rev, self.count()):
del self.nodemap[self.node(x)]
- del self.index[rev:]
+ del self.index[rev:-1]
def checksize(self):
expected = 0
@@ -1282,10 +1252,10 @@
f = self.opener(self.indexfile)
f.seek(0, 2)
actual = f.tell()
- s = struct.calcsize(self.indexformat)
+ s = self._io.size
i = actual / s
di = actual - (i * s)
- if self.inlinedata():
+ if self._inline:
databytes = 0
for r in xrange(self.count()):
databytes += self.length(r)
@@ -1297,5 +1267,3 @@
di = 0
return (dd, di)
-
-
--- a/mercurial/statichttprepo.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/statichttprepo.py Wed Aug 08 23:00:01 2007 +0200
@@ -75,10 +75,4 @@
def instance(ui, path, create):
if create:
raise util.Abort(_('cannot create new static-http repository'))
- if path.startswith('old-http:'):
- ui.warn(_("old-http:// syntax is deprecated, "
- "please use static-http:// instead\n"))
- path = path[4:]
- else:
- path = path[7:]
- return statichttprepository(ui, path)
+ return statichttprepository(ui, path[7:])
--- a/mercurial/streamclone.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/streamclone.py Wed Aug 08 23:00:01 2007 +0200
@@ -66,22 +66,25 @@
# get consistent snapshot of repo. lock during scan so lock not
# needed while we stream, and commits can happen.
+ lock = None
try:
- repolock = repo.lock()
- except (lock.LockHeld, lock.LockUnavailable), inst:
- repo.ui.warn('locking the repository failed: %s\n' % (inst,))
- fileobj.write('2\n')
- return
+ try:
+ repolock = repo.lock()
+ except (lock.LockHeld, lock.LockUnavailable), inst:
+ repo.ui.warn('locking the repository failed: %s\n' % (inst,))
+ fileobj.write('2\n')
+ return
- fileobj.write('0\n')
- repo.ui.debug('scanning\n')
- entries = []
- total_bytes = 0
- for name, size in walkrepo(repo.spath):
- name = repo.decodefn(util.pconvert(name))
- entries.append((name, size))
- total_bytes += size
- repolock.release()
+ fileobj.write('0\n')
+ repo.ui.debug('scanning\n')
+ entries = []
+ total_bytes = 0
+ for name, size in walkrepo(repo.spath):
+ name = repo.decodefn(util.pconvert(name))
+ entries.append((name, size))
+ total_bytes += size
+ finally:
+ del repolock
repo.ui.debug('%d files, %d bytes to transfer\n' %
(len(entries), total_bytes))
--- a/mercurial/ui.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/ui.py Wed Aug 08 23:00:01 2007 +0200
@@ -24,6 +24,8 @@
dest.set(section, name, value)
class ui(object):
+ _isatty = None
+
def __init__(self, verbose=False, debug=False, quiet=False,
interactive=True, traceback=False, report_untrusted=True,
parentui=None):
@@ -62,6 +64,11 @@
def __getattr__(self, key):
return getattr(self.parentui, key)
+ def isatty(self):
+ if ui._isatty is None:
+ ui._isatty = sys.stdin.isatty()
+ return ui._isatty
+
def updateopts(self, verbose=False, debug=False, quiet=False,
interactive=True, traceback=False, config=[]):
for section, name, value in config:
@@ -204,7 +211,9 @@
if name is None or name in ('quiet', 'verbose', 'debug'):
self.verbosity_constraints()
if name is None or name == 'interactive':
- self.interactive = self.configbool("ui", "interactive", True)
+ self.interactive = self.configbool("ui", "interactive", None)
+ if self.interactive is None:
+ self.interactive = self.isatty()
if name is None or name == 'report_untrusted':
self.report_untrusted = (
self.configbool("ui", "report_untrusted", True))
@@ -382,17 +391,29 @@
try: sys.stderr.flush()
except: pass
- def readline(self):
- return sys.stdin.readline()[:-1]
+ def readline(self, prompt=''):
+ if self.isatty():
+ try:
+ # magically add command line editing support, where
+ # available
+ import readline
+ # force demandimport to really load the module
+ readline.read_history_file
+ except ImportError:
+ pass
+ return raw_input(prompt)
+
def prompt(self, msg, pat=None, default="y"):
if not self.interactive: return default
- while 1:
- self.write(msg, " ")
- r = self.readline()
+ try:
+ r = self.readline(msg + ' ')
if not pat or re.match(pat, r):
return r
else:
self.write(_("unrecognized response\n"))
+ except EOFError:
+ raise util.Abort(_('response expected'))
+
def getpass(self, prompt=None, default=None):
if not self.interactive: return default
return getpass.getpass(prompt or _('password: '))
--- a/mercurial/util.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/util.py Wed Aug 08 23:00:01 2007 +0200
@@ -63,7 +63,7 @@
Convert a string from the local character encoding to UTF-8
We attempt to decode strings using the encoding mode set by
- HG_ENCODINGMODE, which defaults to 'strict'. In this mode, unknown
+ HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
characters will cause an error message. Other modes include
'replace', which replaces unknown characters with a special
Unicode character, and 'ignore', which drops the character.
@@ -619,7 +619,7 @@
"""forcibly rename a file"""
try:
os.rename(src, dst)
- except OSError, err:
+ except OSError, err: # FIXME: check err (EEXIST ?)
# on windows, rename to existing file is not allowed, so we
# must delete destination first. but if file is open, unlink
# schedules it for delete but does not delete it. rename
@@ -1306,7 +1306,11 @@
os.makedirs(dirname)
if self._can_symlink:
- os.symlink(src, linkname)
+ try:
+ os.symlink(src, linkname)
+ except OSError, err:
+ raise OSError(err.errno, _('could not symlink to %r: %s') %
+ (src, err.strerror), linkname)
else:
f = self(dst, "w")
f.write(src)
--- a/mercurial/verify.py Wed Aug 08 22:47:30 2007 +0200
+++ b/mercurial/verify.py Wed Aug 08 23:00:01 2007 +0200
@@ -10,6 +10,13 @@
import revlog, mdiff
def verify(repo):
+ lock = repo.lock()
+ try:
+ return _verify(repo)
+ finally:
+ del lock
+
+def _verify(repo):
filelinkrevs = {}
filenodes = {}
changesets = revisions = files = 0
@@ -17,8 +24,6 @@
warnings = [0]
neededmanifests = {}
- lock = repo.lock()
-
def err(msg):
repo.ui.warn(msg + "\n")
errors[0] += 1
--- a/setup.py Wed Aug 08 22:47:30 2007 +0200
+++ b/setup.py Wed Aug 08 23:00:01 2007 +0200
@@ -2,8 +2,8 @@
#
# This is the mercurial setup script.
#
-# './setup.py install', or
-# './setup.py --help' for more options
+# 'python setup.py install', or
+# 'python setup.py --help' for more options
import sys
if not hasattr(sys, 'version_info') or sys.version_info < (2, 3, 0, 'final'):
@@ -64,7 +64,8 @@
packages=['mercurial', 'mercurial.hgweb', 'hgext', 'hgext.convert'],
ext_modules=[Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
Extension('mercurial.bdiff', ['mercurial/bdiff.c']),
- Extension('mercurial.base85', ['mercurial/base85.c'])],
+ Extension('mercurial.base85', ['mercurial/base85.c']),
+ Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c'])],
data_files=[(os.path.join('mercurial', root),
[os.path.join(root, file_) for file_ in files])
for root, dirs, files in os.walk('templates')],
--- a/tests/coverage.py Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/coverage.py Wed Aug 08 23:00:01 2007 +0200
@@ -504,7 +504,7 @@
def get_suite_spots(self, tree, spots):
import symbol, token
for i in range(1, len(tree)):
- if type(tree[i]) == type(()):
+ if isinstance(tree[i], tuple):
if tree[i][0] == symbol.suite:
# Found a suite, look back for the colon and keyword.
lineno_colon = lineno_word = None
--- a/tests/test-abort-checkin.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-abort-checkin.out Wed Aug 08 23:00:01 2007 +0200
@@ -1,8 +1,8 @@
error: pretxncommit.nocommits hook failed: no commits allowed
-abort: no commits allowed
transaction abort!
rollback completed
+abort: no commits allowed
error: pretxncommit.nocommits hook failed: no commits allowed
-abort: no commits allowed
transaction abort!
rollback completed
+abort: no commits allowed
--- a/tests/test-acl.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-acl.out Wed Aug 08 23:00:01 2007 +0200
@@ -129,9 +129,9 @@
acl: acl.deny not enabled
acl: user fred not allowed on foo/file.txt
error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
-abort: acl: access denied for changeset ef1ea85a6374
transaction abort!
rollback completed
+abort: acl: access denied for changeset ef1ea85a6374
no rollback information available
0:6675d58eff77
@@ -170,9 +170,9 @@
acl: allowing changeset f9cafe1212c8
acl: user fred not allowed on quux/file.py
error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
-abort: acl: access denied for changeset 911600dab2ae
transaction abort!
rollback completed
+abort: acl: access denied for changeset 911600dab2ae
no rollback information available
0:6675d58eff77
@@ -210,9 +210,9 @@
acl: acl.deny enabled, 0 entries for user barney
acl: user barney not allowed on foo/file.txt
error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
-abort: acl: access denied for changeset ef1ea85a6374
transaction abort!
rollback completed
+abort: acl: access denied for changeset ef1ea85a6374
no rollback information available
0:6675d58eff77
@@ -253,9 +253,9 @@
acl: allowing changeset f9cafe1212c8
acl: user fred not allowed on quux/file.py
error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
-abort: acl: access denied for changeset 911600dab2ae
transaction abort!
rollback completed
+abort: acl: access denied for changeset 911600dab2ae
no rollback information available
0:6675d58eff77
@@ -296,9 +296,9 @@
acl: allowing changeset ef1ea85a6374
acl: user fred denied on foo/Bar/file.txt
error: pretxnchangegroup.acl hook failed: acl: access denied for changeset f9cafe1212c8
-abort: acl: access denied for changeset f9cafe1212c8
transaction abort!
rollback completed
+abort: acl: access denied for changeset f9cafe1212c8
no rollback information available
0:6675d58eff77
@@ -338,9 +338,9 @@
acl: acl.deny enabled, 0 entries for user barney
acl: user barney not allowed on foo/file.txt
error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
-abort: acl: access denied for changeset ef1ea85a6374
transaction abort!
rollback completed
+abort: acl: access denied for changeset ef1ea85a6374
no rollback information available
0:6675d58eff77
@@ -427,9 +427,9 @@
acl: allowing changeset f9cafe1212c8
acl: user wilma not allowed on quux/file.py
error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
-abort: acl: access denied for changeset 911600dab2ae
transaction abort!
rollback completed
+abort: acl: access denied for changeset 911600dab2ae
no rollback information available
0:6675d58eff77
@@ -471,9 +471,9 @@
added 3 changesets with 3 changes to 3 files
calling hook pretxnchangegroup.acl: hgext.acl.hook
error: pretxnchangegroup.acl hook failed: unable to open ../acl.config: No such file or directory
-abort: unable to open ../acl.config: No such file or directory
transaction abort!
rollback completed
+abort: unable to open ../acl.config: No such file or directory
no rollback information available
0:6675d58eff77
@@ -524,9 +524,9 @@
acl: allowing changeset f9cafe1212c8
acl: user betty not allowed on quux/file.py
error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
-abort: acl: access denied for changeset 911600dab2ae
transaction abort!
rollback completed
+abort: acl: access denied for changeset 911600dab2ae
no rollback information available
0:6675d58eff77
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-alias Wed Aug 08 23:00:01 2007 +0200
@@ -0,0 +1,32 @@
+#!/bin/sh
+
+cat > $HGRCPATH <<EOF
+[extensions]
+alias=
+
+[alias]
+myinit = init
+cleanstatus = status -c
+unknown = bargle
+ambiguous = s
+recursive = recursive
+EOF
+
+echo '% basic'
+hg myinit alias
+
+echo '% unknown'
+hg unknown
+
+echo '% ambiguous'
+hg ambiguous
+
+echo '% recursive'
+hg recursive
+
+cd alias
+echo foo > foo
+hg ci -Amfoo
+
+echo '% with opts'
+hg cleanst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-alias.out Wed Aug 08 23:00:01 2007 +0200
@@ -0,0 +1,10 @@
+% basic
+% unknown
+*** [alias] unknown: command bargle is unknown
+% ambiguous
+*** [alias] ambiguous: command s is ambiguous
+% recursive
+*** [alias] recursive: circular dependency on recursive
+adding foo
+% with opts
+C foo
--- a/tests/test-annotate Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-annotate Wed Aug 08 23:00:01 2007 +0200
@@ -12,18 +12,27 @@
echo % annotate -c
hg annotate -c a
+echo % annotate -cl
+hg annotate -cl a
+
echo % annotate -d
hg annotate -d a
echo % annotate -n
hg annotate -n a
+echo % annotate -nl
+hg annotate -nl a
+
echo % annotate -u
hg annotate -u a
echo % annotate -cdnu
hg annotate -cdnu a
+echo % annotate -cdnul
+hg annotate -cdnul a
+
cat <<EOF >>a
a
a
@@ -32,28 +41,34 @@
hg cp a b
hg ci -mb -d '1 0'
cat <<EOF >> b
-b
-b
-b
+b4
+b5
+b6
EOF
hg ci -mb2 -d '2 0'
-echo % annotate b
-hg annotate b
+echo % annotate -n b
+hg annotate -n b
+echo % annotate -nl b
+hg annotate -nl b
echo % annotate -nf b
hg annotate -nf b
+echo % annotate -nlf b
+hg annotate -nlf b
hg up -C 2
cat <<EOF >> b
-b
+b4
c
-b
+b5
EOF
hg ci -mb2.1 -d '2 0'
hg merge
hg ci -mmergeb -d '3 0'
echo % annotate after merge
hg annotate -nf b
+echo % annotate after merge with -l
+hg annotate -nlf b
hg up -C 1
hg cp a b
@@ -65,17 +80,21 @@
hg ci -mc -d '3 0'
hg merge
cat <<EOF >> b
-b
+b4
c
-b
+b5
EOF
echo d >> b
hg ci -mmerge2 -d '4 0'
echo % annotate after rename merge
hg annotate -nf b
+echo % annotate after rename merge with -l
+hg annotate -nlf b
echo % linkrev vs rev
-hg annotate -r tip a
+hg annotate -r tip -n a
+echo % linkrev vs rev with -l
+hg annotate -r tip -nl a
# test issue 589
# annotate was crashing when trying to --follow something
--- a/tests/test-annotate.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-annotate.out Wed Aug 08 23:00:01 2007 +0200
@@ -3,28 +3,48 @@
adding a
% annotate -c
8435f90966e4: a
+% annotate -cl
+8435f90966e4:1: a
% annotate -d
Thu Jan 01 00:00:01 1970 +0000: a
% annotate -n
0: a
+% annotate -nl
+0:1: a
% annotate -u
nobody: a
% annotate -cdnu
nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000: a
-% annotate b
+% annotate -cdnul
+nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000:1: a
+% annotate -n b
2: a
2: a
2: a
-3: b
-3: b
-3: b
+3: b4
+3: b5
+3: b6
+% annotate -nl b
+2:1: a
+2:2: a
+2:3: a
+3:4: b4
+3:5: b5
+3:6: b6
% annotate -nf b
0 a: a
1 a: a
1 a: a
-3 b: b
-3 b: b
-3 b: b
+3 b: b4
+3 b: b5
+3 b: b6
+% annotate -nlf b
+0 a:1: a
+1 a:2: a
+1 a:3: a
+3 b:4: b4
+3 b:5: b5
+3 b:6: b6
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
merging b
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -33,9 +53,16 @@
0 a: a
1 a: a
1 a: a
-3 b: b
+3 b: b4
4 b: c
-3 b: b
+3 b: b5
+% annotate after merge with -l
+0 a:1: a
+1 a:2: a
+1 a:3: a
+3 b:4: b4
+4 b:5: c
+3 b:5: b5
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
merging b
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -44,14 +71,26 @@
0 a: a
6 b: z
1 a: a
-3 b: b
+3 b: b4
4 b: c
-3 b: b
+3 b: b5
7 b: d
+% annotate after rename merge with -l
+0 a:1: a
+6 b:2: z
+1 a:3: a
+3 b:4: b4
+4 b:5: c
+3 b:5: b5
+7 b:7: d
% linkrev vs rev
0: a
1: a
1: a
+% linkrev vs rev with -l
+0:1: a
+1:2: a
+1:3: a
% generate ABA rename configuration
% annotate after ABA with follow
foo: foo
--- a/tests/test-archive Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-archive Wed Aug 08 23:00:01 2007 +0200
@@ -63,6 +63,7 @@
unzip -t test.zip
hg archive -t tar - | tar tf - | sed "s/$QTIP/TIP/"
+
hg archive -r 0 -t tar rev-%r.tar
if [ -f rev-0.tar ]; then
echo 'rev-0.tar created'
--- a/tests/test-bad-extension.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-bad-extension.out Wed Aug 08 23:00:01 2007 +0200
@@ -1,5 +1,4 @@
*** failed to import extension badext: bit bucket overflow
-extension 'hgext.gpg' overrides commands: sigs sigcheck sign
hg help [COMMAND]
show help for a command, extension, or list of commands
--- a/tests/test-bundle-r.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-bundle-r.out Wed Aug 08 23:00:01 2007 +0200
@@ -152,9 +152,9 @@
% 2
2:d62976ca1e50
adding changesets
-abort: unknown parent ac69c658229d!
transaction abort!
rollback completed
+abort: unknown parent ac69c658229d!
% 2
2:d62976ca1e50
adding changesets
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-children Wed Aug 08 23:00:01 2007 +0200
@@ -0,0 +1,59 @@
+#!/bin/sh
+# test children command
+
+cat <<EOF >> $HGRCPATH
+[extensions]
+hgext.children=
+EOF
+
+echo "% init"
+hg init t
+cd t
+
+echo "% no working directory"
+hg children
+
+echo % setup
+echo 0 > file0
+hg ci -qAm 0 -d '0 0'
+
+echo 1 > file1
+hg ci -qAm 1 -d '1 0'
+
+echo 2 >> file0
+hg ci -qAm 2 -d '2 0'
+
+hg co null
+echo 3 > file3
+hg ci -qAm 3 -d '3 0'
+
+echo "% hg children at revision 3 (tip)"
+hg children
+
+hg co null
+echo "% hg children at nullrev (should be 0 and 3)"
+hg children
+
+hg co 1
+echo "% hg children at revision 1 (should be 2)"
+hg children
+
+hg co 2
+echo "% hg children at revision 2 (other head)"
+hg children
+
+for i in null 0 1 2 3; do
+ echo "% hg children -r $i"
+ hg children -r $i
+done
+
+echo "% hg children -r 0 file0 (should be 2)"
+hg children -r 0 file0
+
+echo "% hg children -r 1 file0 (should be 2)"
+hg children -r 1 file0
+
+hg co 0
+echo "% hg children file0 at revision 0 (should be 2)"
+hg children file0
+
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-children.out Wed Aug 08 23:00:01 2007 +0200
@@ -0,0 +1,62 @@
+% init
+% no working directory
+% setup
+0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+% hg children at revision 3 (tip)
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+% hg children at nullrev (should be 0 and 3)
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% hg children at revision 1 (should be 2)
+changeset: 2:8f5eea5023c2
+user: test
+date: Thu Jan 01 00:00:02 1970 +0000
+summary: 2
+
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% hg children at revision 2 (other head)
+% hg children -r null
+changeset: 0:4df8521a7374
+user: test
+date: Thu Jan 01 00:00:00 1970 +0000
+summary: 0
+
+changeset: 3:e2962852269d
+tag: tip
+parent: -1:000000000000
+user: test
+date: Thu Jan 01 00:00:03 1970 +0000
+summary: 3
+
+% hg children -r 0
+changeset: 1:708c093edef0
+user: test
+date: Thu Jan 01 00:00:01 1970 +0000
+summary: 1
+
+% hg children -r 1
+changeset: 2:8f5eea5023c2
+user: test
+date: Thu Jan 01 00:00:02 1970 +0000
+summary: 2
+
+% hg children -r 2
+% hg children -r 3
+% hg children -r 0 file0 (should be 2)
+changeset: 2:8f5eea5023c2
+user: test
+date: Thu Jan 01 00:00:02 1970 +0000
+summary: 2
+
+% hg children -r 1 file0 (should be 2)
+changeset: 2:8f5eea5023c2
+user: test
+date: Thu Jan 01 00:00:02 1970 +0000
+summary: 2
+
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+% hg children file0 at revision 0 (should be 2)
+changeset: 2:8f5eea5023c2
+user: test
+date: Thu Jan 01 00:00:02 1970 +0000
+summary: 2
+
--- a/tests/test-clone-pull-corruption.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-clone-pull-corruption.out Wed Aug 08 23:00:01 2007 +0200
@@ -1,8 +1,8 @@
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
pulling from ../source
-abort: pretxncommit hook exited with status 1
transaction abort!
rollback completed
+abort: pretxncommit hook exited with status 1
searching for changes
adding changesets
adding manifests
--- a/tests/test-commit.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-commit.out Wed Aug 08 23:00:01 2007 +0200
@@ -1,4 +1,6 @@
% commit date test
+transaction abort!
+rollback completed
abort: impossible time zone offset: 4444444
transaction abort!
rollback completed
@@ -6,8 +8,6 @@
transaction abort!
rollback completed
abort: invalid date: 'foo bar'
-transaction abort!
-rollback completed
nothing changed
% partial commit test
trouble committing bar!
--- a/tests/test-committer.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-committer.out Wed Aug 08 23:00:01 2007 +0200
@@ -22,7 +22,7 @@
date: Mon Jan 12 13:46:40 1970 +0000
summary: commit-1
-abort: Please specify a username.
transaction abort!
rollback completed
+abort: Please specify a username.
No username found, using user@host instead
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert Wed Aug 08 23:00:01 2007 +0200
@@ -0,0 +1,21 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "convert=" >> $HGRCPATH
+
+hg init a
+cd a
+echo a > a
+hg ci -d'0 0' -Ama
+hg cp a b
+hg ci -d'1 0' -mb
+hg rm a
+hg ci -d'2 0' -mc
+hg mv b a
+hg ci -d'3 0' -md
+echo a >> a
+hg ci -d'4 0' -me
+
+cd ..
+hg convert a 2>&1 | grep -v 'subversion python bindings could not be loaded'
+hg --cwd a-hg pull ../a
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert.out Wed Aug 08 23:00:01 2007 +0200
@@ -0,0 +1,14 @@
+adding a
+assuming destination a-hg
+initializing destination a-hg repository
+scanning source...
+sorting...
+converting...
+4 a
+3 b
+2 c
+1 d
+0 e
+pulling from ../a
+searching for changes
+no changes found
--- a/tests/test-debugcomplete.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-debugcomplete.out Wed Aug 08 23:00:01 2007 +0200
@@ -110,6 +110,7 @@
% Show the options for the "serve" command
--accesslog
--address
+--certificate
--config
--cwd
--daemon
--- a/tests/test-double-merge Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-double-merge Wed Aug 08 23:00:01 2007 +0200
@@ -15,7 +15,7 @@
# in another branch, change foo in a way that doesn't conflict with
# the other changes
hg up -qC 0
-echo line 0 >| foo
+echo line 0 > foo
hg cat foo >> foo
hg ci -m 'change foo' -d "1000000 0"
--- a/tests/test-encoding.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-encoding.out Wed Aug 08 23:00:01 2007 +0200
@@ -9,9 +9,9 @@
? latin-1
? latin-1-tag
? utf-8
-abort: decoding near ' encoded: é': 'ascii' codec can't decode byte 0xe9 in position 20: ordinal not in range(128)!
transaction abort!
rollback completed
+abort: decoding near ' encoded: é': 'ascii' codec can't decode byte 0xe9 in position 20: ordinal not in range(128)!
% these should work
marked working directory as branch é
% ascii
--- a/tests/test-extdiff Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-extdiff Wed Aug 08 23:00:01 2007 +0200
@@ -6,7 +6,9 @@
hg init a
cd a
echo a > a
+echo b > b
hg add
+# should diff cloned directories
hg extdiff -o -r $opt
echo "[extdiff]" >> $HGRCPATH
@@ -22,13 +24,17 @@
echo b >> a
hg ci -d '1 0' -mtest2
+# should diff cloned files directly
hg falabala -r 0:1
# test diff during merge
hg update 0
-echo b >> b
-hg add b
+echo c >> c
+hg add c
hg ci -m "new branch" -d '1 0'
hg update -C 1
hg merge tip
-hg falabala || echo "diff-like tools yield a non-zero exit code"
+# should diff cloned file against wc file
+hg falabala > out || echo "diff-like tools yield a non-zero exit code"
+# cleanup the output since the wc is a tmp directory
+sed 's:\(.* \).*\(\/test-extdiff\):\1[tmp]\2:' out
--- a/tests/test-extdiff.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-extdiff.out Wed Aug 08 23:00:01 2007 +0200
@@ -1,9 +1,7 @@
adding a
-making snapshot of 0 files from rev 000000000000
-making snapshot of 1 files from working dir
+adding b
Only in a: a
-making snapshot of 0 files from rev 000000000000
-making snapshot of 1 files from working dir
+Only in a: b
diffing a.000000000000 a
hg falabala [OPTION]... [FILE]...
@@ -26,14 +24,10 @@
-X --exclude exclude names matching the given patterns
use "hg -v help falabala" to show global options
-making snapshot of 1 files from rev e27a2475d60a
-making snapshot of 1 files from rev 5e49ec8d3f05
-diffing a.e27a2475d60a a.5e49ec8d3f05
+diffing a.8a5febb7f867/a a.34eed99112ab/a
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
-making snapshot of 1 files from rev 5e49ec8d3f05
-making snapshot of 1 files from working dir
-diffing a.5e49ec8d3f05 a
diff-like tools yield a non-zero exit code
+diffing a.34eed99112ab/c [tmp]/test-extdiff/a/c
--- a/tests/test-hook.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-hook.out Wed Aug 08 23:00:01 2007 +0200
@@ -60,9 +60,9 @@
pretxncommit hook: HG_NODE=fad284daf8c032148abaffcd745dafeceefceb61 HG_PARENT1=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198
5:fad284daf8c0
pretxncommit.forbid hook: HG_NODE=fad284daf8c032148abaffcd745dafeceefceb61 HG_PARENT1=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198
-abort: pretxncommit.forbid1 hook exited with status 1
transaction abort!
rollback completed
+abort: pretxncommit.forbid1 hook exited with status 1
4:8ea2ef7ad3e8
precommit hook: HG_PARENT1=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198
precommit.forbid hook: HG_PARENT1=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198
@@ -86,9 +86,9 @@
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
-abort: pretxnchangegroup.forbid1 hook exited with status 1
transaction abort!
rollback completed
+abort: pretxnchangegroup.forbid1 hook exited with status 1
3:4c52fb2e4022
preoutgoing hook: HG_SOURCE=pull
outgoing hook: HG_NODE=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 HG_SOURCE=pull
--- a/tests/test-hup.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-hup.out Wed Aug 08 23:00:01 2007 +0200
@@ -1,7 +1,7 @@
0
0
adding changesets
-killed!
transaction abort!
rollback completed
+killed!
.hg/00changelog.i .hg/journal.dirstate .hg/requires .hg/store .hg/store/00changelog.i .hg/store/00changelog.i.a
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-imerge Wed Aug 08 23:00:01 2007 +0200
@@ -0,0 +1,60 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "imerge=" >> $HGRCPATH
+HGMERGE=true
+export HGMERGE
+
+hg init base
+cd base
+
+echo foo > foo
+echo bar > bar
+hg ci -Am0 -d '0 0'
+
+hg mv foo foo2
+echo foo >> foo2
+hg ci -m1 -d '1 0'
+
+hg up -C 0
+echo bar >> foo
+echo bar >> bar
+hg ci -m2 -d '2 0'
+
+echo % start imerge
+hg imerge
+
+cat foo2
+cat bar
+
+echo % status -v
+hg -v imerge st
+
+echo % next
+hg imerge next
+
+echo % merge next
+hg --traceback imerge
+
+echo % unresolve
+hg imerge unres foo
+
+echo % merge foo
+hg imerge merge foo
+
+echo % save
+echo foo > foo2
+hg imerge save ../savedmerge
+
+echo % load
+hg up -C 0
+hg imerge --traceback load ../savedmerge
+cat foo2
+
+hg ci -m'merged' -d '3 0'
+hg tip -v
+
+echo % nothing to merge
+hg imerge
+
+exit 0
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-imerge.out Wed Aug 08 23:00:01 2007 +0200
@@ -0,0 +1,45 @@
+adding bar
+adding foo
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+% start imerge
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+U foo
+foo
+bar
+bar
+bar
+% status -v
+merging e6da46716401 and 30d266f502e7
+U foo (foo2)
+% next
+foo
+% merge next
+merging foo and foo2
+all conflicts resolved
+% unresolve
+% merge foo
+merging foo and foo2
+all conflicts resolved
+% save
+% load
+2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+R foo
+all conflicts resolved
+foo
+changeset: 3:fa9a6defdcaf
+tag: tip
+parent: 2:e6da46716401
+parent: 1:30d266f502e7
+user: test
+date: Thu Jan 01 00:00:03 1970 +0000
+files: foo foo2
+description:
+merged
+
+
+% nothing to merge
+abort: there is nothing to merge - use "hg update" instead
--- a/tests/test-import Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-import Wed Aug 08 23:00:01 2007 +0200
@@ -93,6 +93,24 @@
hg --cwd b tip | grep second
rm -r b
+# subject: duplicate detection, removal of [PATCH]
+cat > mkmsg2.py <<EOF
+import email.Message, sys
+msg = email.Message.Message()
+msg.set_payload('email patch\n\nnext line\n' + open('tip.patch').read())
+msg['Subject'] = '[PATCH] email patch'
+msg['From'] = 'email patcher'
+sys.stdout.write(msg.as_string())
+EOF
+
+echo '% plain diff in email, [PATCH] subject, message body with subject'
+hg clone -r0 a b
+hg --cwd a diff -r0:1 > tip.patch
+python mkmsg2.py | hg --cwd b import -
+hg --cwd b tip --template '{desc}\n'
+rm -r b
+
+
# bug non regression test
# importing a patch in a subdirectory failed at the commit stage
echo line 2 >> a/d1/d2/a
--- a/tests/test-import.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-import.out Wed Aug 08 23:00:01 2007 +0200
@@ -100,6 +100,17 @@
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
applying patch from stdin
summary: second change
+% plain diff in email, [PATCH] subject, message body with subject
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 2 changes to 2 files
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+applying patch from stdin
+email patch
+
+next line
% hg import in a subdirectory
requesting all changes
adding changesets
--- a/tests/test-issue322.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-issue322.out Wed Aug 08 23:00:01 2007 +0200
@@ -1,12 +1,12 @@
% file replaced with directory
adding a
% should fail - would corrupt dirstate
-abort: file named 'a' already in dirstate
+abort: file 'a' in dirstate clashes with 'a/a'
% directory replaced with file
adding a/a
% should fail - would corrupt dirstate
-abort: directory named 'a' already in dirstate
+abort: directory 'a' already in dirstate
% directory replaced with file
adding b/c/d
% should fail - would corrupt dirstate
-abort: directory named 'b' already in dirstate
+abort: directory 'b' already in dirstate
--- a/tests/test-mq Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-mq Wed Aug 08 23:00:01 2007 +0200
@@ -338,6 +338,33 @@
cat .hg/patches/bar
hg log -vC --template '{rev} {file_copies%filecopy}\n' -r .
+echo % create a git patch
+echo a > alexander
+hg add alexander
+hg qnew -f --git addalexander
+grep diff .hg/patches/addalexander
+
+echo % create a git binary patch
+cat > writebin.py <<EOF
+import sys
+path = sys.argv[1]
+open(path, 'wb').write('BIN\x00ARY')
+EOF
+python writebin.py bucephalus
+
+python "$TESTDIR/md5sum.py" bucephalus
+hg add bucephalus
+hg qnew -f --git addbucephalus
+grep diff .hg/patches/addbucephalus
+
+echo % check binary patches can be popped and pushed
+hg qpop
+test -f bucephalus && echo % bucephalus should not be there
+hg qpush
+test -f bucephalus || echo % bucephalus should be there
+python "$TESTDIR/md5sum.py" bucephalus
+
+
echo '% strip again'
cd ..
hg init strip
@@ -370,10 +397,17 @@
echo foo > foo
hg add foo
hg ci -m 'add foo'
-hg qinit -c
+hg qinit
hg qnew patch1
echo bar >> foo
hg qrefresh -m 'change foo'
+cd ..
+
+# repo with unversioned patch dir
+hg qclone qclonesource failure
+
+cd qclonesource
+hg qinit -c
hg qci -m checkpoint
qlog
cd ..
--- a/tests/test-mq.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-mq.out Wed Aug 08 23:00:01 2007 +0200
@@ -262,7 +262,8 @@
Patch queue now empty
applying foo
applying bar
-1 out of 1 hunk ignored -- saving rejects to file foo.rej
+file foo already exists
+1 out of 1 hunk FAILED -- saving rejects to file foo.rej
patch failed, unable to continue (try -v)
patch failed, rejects left in working dir
Errors during apply, please fix and refresh bar
@@ -359,6 +360,16 @@
@@ -0,0 +1,1 @@
+bar
3 barney (foo)
+% create a git patch
+diff --git a/alexander b/alexander
+% create a git binary patch
+8ba2a2f3e77b55d03051ff9c24ad65e7 bucephalus
+diff --git a/bucephalus b/bucephalus
+% check binary patches can be popped and pushed
+Now at: addalexander
+applying addbucephalus
+Now at: addbucephalus
+8ba2a2f3e77b55d03051ff9c24ad65e7 bucephalus
% strip again
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
merging foo
@@ -409,6 +420,8 @@
summary: add foo
% qclone
+abort: versioned patch repository not found (see qinit -c)
+adding .hg/patches/patch1
main repo:
rev 1: change foo
rev 0: add foo
--- a/tests/test-parse-date.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-parse-date.out Wed Aug 08 23:00:01 2007 +0200
@@ -3,6 +3,8 @@
merging with changeset 2:e6c3abc120e7
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
+transaction abort!
+rollback completed
abort: invalid date: 'should fail'
transaction abort!
rollback completed
@@ -10,8 +12,6 @@
transaction abort!
rollback completed
abort: impossible time zone offset: 1400000
-transaction abort!
-rollback completed
Sun Jan 15 13:30:00 2006 +0500
Sun Jan 15 13:30:00 2006 -0800
Sat Jul 15 13:30:00 2006 +0500
--- a/tests/test-rebuildstate.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-rebuildstate.out Wed Aug 08 23:00:01 2007 +0200
@@ -1,7 +1,7 @@
adding bar
adding foo
% state dump
-a 644 0 baz
+a 0 -1 baz
n 644 0 foo
r 0 0 bar
% status
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-record Wed Aug 08 23:00:01 2007 +0200
@@ -0,0 +1,216 @@
+#!/bin/sh
+
+echo "[ui]" >> $HGRCPATH
+echo "interactive=true" >> $HGRCPATH
+echo "[extensions]" >> $HGRCPATH
+echo "record=" >> $HGRCPATH
+
+echo % help
+
+hg help record
+
+hg init a
+cd a
+
+echo % select no files
+
+touch empty-rw
+hg add empty-rw
+hg record empty-rw<<EOF
+n
+EOF
+echo; hg tip -p
+
+echo % select files but no hunks
+
+hg record empty-rw<<EOF
+y
+n
+EOF
+echo; hg tip -p
+
+echo % record empty file
+
+hg record -d '0 0' -m empty empty-rw<<EOF
+y
+y
+EOF
+echo; hg tip -p
+
+echo % rename empty file
+
+hg mv empty-rw empty-rename
+hg record -d '1 0' -m rename<<EOF
+y
+EOF
+echo; hg tip -p
+
+echo % copy empty file
+
+hg cp empty-rename empty-copy
+hg record -d '2 0' -m copy<<EOF
+y
+EOF
+echo; hg tip -p
+
+echo % delete empty file
+
+hg rm empty-copy
+hg record -d '3 0' -m delete<<EOF
+y
+EOF
+echo; hg tip -p
+
+echo % add binary file
+
+hg bundle --base -2 tip.bundle
+hg add tip.bundle
+hg record -d '4 0' -m binary<<EOF
+y
+EOF
+echo; hg tip -p
+
+echo % change binary file
+
+hg bundle --base -2 tip.bundle
+hg record -d '5 0' -m binary-change<<EOF
+y
+EOF
+echo; hg tip -p
+
+echo % rename and change binary file
+
+hg mv tip.bundle top.bundle
+hg bundle --base -2 top.bundle
+hg record -d '6 0' -m binary-change-rename<<EOF
+y
+EOF
+echo; hg tip -p
+
+echo % add plain file
+
+for i in 1 2 3 4 5 6 7 8 9 10; do
+ echo $i >> plain
+done
+
+hg add plain
+hg record -d '7 0' -m plain plain<<EOF
+y
+y
+EOF
+echo; hg tip -p
+
+echo % modify end of plain file
+
+echo 11 >> plain
+hg record -d '8 0' -m end plain <<EOF
+y
+y
+EOF
+
+echo % modify end of plain file, no EOL
+
+hg tip --template '{node}' >> plain
+hg record -d '9 0' -m noeol plain <<EOF
+y
+y
+EOF
+
+echo % modify end of plain file, add EOL
+
+echo >> plain
+hg record -d '10 0' -m eol plain <<EOF
+y
+y
+y
+EOF
+
+echo % modify beginning, trim end, record both
+
+rm plain
+for i in 2 2 3 4 5 6 7 8 9 10; do
+ echo $i >> plain
+done
+
+hg record -d '10 0' -m begin-and-end plain <<EOF
+y
+y
+y
+EOF
+echo; hg tip -p
+
+echo % trim beginning, modify end
+
+rm plain
+for i in 4 5 6 7 8 9 10.new; do
+ echo $i >> plain
+done
+
+echo % record end
+
+hg record -d '11 0' -m end-only plain <<EOF
+y
+n
+y
+EOF
+echo; hg tip -p
+
+echo % record beginning
+
+hg record -d '12 0' -m begin-only plain <<EOF
+y
+y
+EOF
+echo; hg tip -p
+
+echo % add to beginning, trim from end
+
+rm plain
+for i in 1 2 3 4 5 6 7 8 9; do
+ echo $i >> plain
+done
+
+echo % record end
+
+hg record --traceback -d '13 0' -m end-again plain<<EOF
+y
+n
+y
+EOF
+
+echo % add to beginning, middle, end
+
+rm plain
+for i in 1 2 3 4 5 5.new 5.reallynew 6 7 8 9 10 11; do
+ echo $i >> plain
+done
+
+echo % record beginning, middle
+
+hg record -d '14 0' -m middle-only plain <<EOF
+y
+y
+y
+n
+EOF
+echo; hg tip -p
+
+echo % record end
+
+hg record -d '15 0' -m end-only plain <<EOF
+y
+y
+EOF
+echo; hg tip -p
+
+mkdir subdir
+cd subdir
+echo a > a
+hg ci -d '16 0' -Amsubdir
+
+echo a >> a
+hg record -d '16 0' -m subdir-change a <<EOF
+y
+y
+EOF
+echo; hg tip -p
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-record.out Wed Aug 08 23:00:01 2007 +0200
@@ -0,0 +1,403 @@
+% help
+hg record [OPTION]... [FILE]...
+
+interactively select changes to commit
+
+options:
+
+ -A --addremove mark new/missing files as added/removed before committing
+ -I --include include names matching the given patterns
+ -X --exclude exclude names matching the given patterns
+ -m --message use <text> as commit message
+ -l --logfile read commit message from <file>
+ -d --date record datecode as commit date
+ -u --user record user as committer
+
+use "hg -v help record" to show global options
+% select no files
+diff --git a/empty-rw b/empty-rw
+new file mode 100644
+record changes to 'empty-rw'? [y]es [n]o no changes to record
+
+changeset: -1:000000000000
+tag: tip
+user:
+date: Thu Jan 01 00:00:00 1970 +0000
+
+
+% select files but no hunks
+diff --git a/empty-rw b/empty-rw
+new file mode 100644
+record changes to 'empty-rw'? [y]es [n]o transaction abort!
+rollback completed
+
+changeset: -1:000000000000
+tag: tip
+user:
+date: Thu Jan 01 00:00:00 1970 +0000
+
+
+% record empty file
+diff --git a/empty-rw b/empty-rw
+new file mode 100644
+record changes to 'empty-rw'? [y]es [n]o
+changeset: 0:c0708cf4e46e
+tag: tip
+user: test
+date: Thu Jan 01 00:00:00 1970 +0000
+summary: empty
+
+
+% rename empty file
+diff --git a/empty-rw b/empty-rename
+rename from empty-rw
+rename to empty-rename
+record changes to 'empty-rw' and 'empty-rename'? [y]es [n]o
+changeset: 1:df251d174da3
+tag: tip
+user: test
+date: Thu Jan 01 00:00:01 1970 +0000
+summary: rename
+
+
+% copy empty file
+diff --git a/empty-rename b/empty-copy
+copy from empty-rename
+copy to empty-copy
+record changes to 'empty-rename' and 'empty-copy'? [y]es [n]o
+changeset: 2:b63ea3939f8d
+tag: tip
+user: test
+date: Thu Jan 01 00:00:02 1970 +0000
+summary: copy
+
+
+% delete empty file
+diff --git a/empty-copy b/empty-copy
+deleted file mode 100644
+record changes to 'empty-copy'? [y]es [n]o
+changeset: 3:a2546574bce9
+tag: tip
+user: test
+date: Thu Jan 01 00:00:03 1970 +0000
+summary: delete
+
+
+% add binary file
+diff --git a/tip.bundle b/tip.bundle
+new file mode 100644
+this is a binary file
+record changes to 'tip.bundle'? [y]es [n]o
+changeset: 4:9e998a545a8b
+tag: tip
+user: test
+date: Thu Jan 01 00:00:04 1970 +0000
+summary: binary
+
+diff -r a2546574bce9 -r 9e998a545a8b tip.bundle
+Binary file tip.bundle has changed
+
+% change binary file
+diff --git a/tip.bundle b/tip.bundle
+this modifies a binary file (all or nothing)
+record changes to 'tip.bundle'? [y]es [n]o
+changeset: 5:93d05561507d
+tag: tip
+user: test
+date: Thu Jan 01 00:00:05 1970 +0000
+summary: binary-change
+
+diff -r 9e998a545a8b -r 93d05561507d tip.bundle
+Binary file tip.bundle has changed
+
+% rename and change binary file
+diff --git a/tip.bundle b/top.bundle
+rename from tip.bundle
+rename to top.bundle
+this modifies a binary file (all or nothing)
+record changes to 'tip.bundle' and 'top.bundle'? [y]es [n]o
+changeset: 6:699cc1bea9aa
+tag: tip
+user: test
+date: Thu Jan 01 00:00:06 1970 +0000
+summary: binary-change-rename
+
+diff -r 93d05561507d -r 699cc1bea9aa tip.bundle
+Binary file tip.bundle has changed
+diff -r 93d05561507d -r 699cc1bea9aa top.bundle
+Binary file top.bundle has changed
+
+% add plain file
+diff --git a/plain b/plain
+new file mode 100644
+record changes to 'plain'? [y]es [n]o
+changeset: 7:118ed744216b
+tag: tip
+user: test
+date: Thu Jan 01 00:00:07 1970 +0000
+summary: plain
+
+diff -r 699cc1bea9aa -r 118ed744216b plain
+--- /dev/null Thu Jan 01 00:00:00 1970 +0000
++++ b/plain Thu Jan 01 00:00:07 1970 +0000
+@@ -0,0 +1,10 @@
++1
++2
++3
++4
++5
++6
++7
++8
++9
++10
+
+% modify end of plain file
+diff --git a/plain b/plain
+1 hunks, 1 lines changed
+record changes to 'plain'? [y]es [n]o @@ -8,3 +8,4 @@ 8
+ 8
+ 9
+ 10
++11
+record this change to 'plain'? [y]es [n]o % modify end of plain file, no EOL
+diff --git a/plain b/plain
+1 hunks, 1 lines changed
+record changes to 'plain'? [y]es [n]o @@ -9,3 +9,4 @@ 9
+ 9
+ 10
+ 11
++cf81a2760718a74d44c0c2eecb72f659e63a69c5
+\ No newline at end of file
+record this change to 'plain'? [y]es [n]o % modify end of plain file, add EOL
+diff --git a/plain b/plain
+1 hunks, 2 lines changed
+record changes to 'plain'? [y]es [n]o @@ -9,4 +9,4 @@ 9
+ 9
+ 10
+ 11
+-cf81a2760718a74d44c0c2eecb72f659e63a69c5
+\ No newline at end of file
++cf81a2760718a74d44c0c2eecb72f659e63a69c5
+record this change to 'plain'? [y]es [n]o % modify beginning, trim end, record both
+diff --git a/plain b/plain
+2 hunks, 4 lines changed
+record changes to 'plain'? [y]es [n]o @@ -1,4 +1,4 @@ 1
+-1
++2
+ 2
+ 3
+ 4
+record this change to 'plain'? [y]es [n]o @@ -8,5 +8,3 @@ 8
+ 8
+ 9
+ 10
+-11
+-cf81a2760718a74d44c0c2eecb72f659e63a69c5
+record this change to 'plain'? [y]es [n]o
+changeset: 11:d09ab1967dab
+tag: tip
+user: test
+date: Thu Jan 01 00:00:10 1970 +0000
+summary: begin-and-end
+
+diff -r e2ecd9b0b78d -r d09ab1967dab plain
+--- a/plain Thu Jan 01 00:00:10 1970 +0000
++++ b/plain Thu Jan 01 00:00:10 1970 +0000
+@@ -1,4 +1,4 @@ 1
+-1
++2
+ 2
+ 3
+ 4
+@@ -8,5 +8,3 @@ 8
+ 8
+ 9
+ 10
+-11
+-cf81a2760718a74d44c0c2eecb72f659e63a69c5
+
+% trim beginning, modify end
+% record end
+diff --git a/plain b/plain
+2 hunks, 5 lines changed
+record changes to 'plain'? [y]es [n]o @@ -1,9 +1,6 @@ 2
+-2
+-2
+-3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+record this change to 'plain'? [y]es [n]o @@ -4,7 +1,7 @@
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+-10
++10.new
+record this change to 'plain'? [y]es [n]o
+changeset: 12:44516c9708ae
+tag: tip
+user: test
+date: Thu Jan 01 00:00:11 1970 +0000
+summary: end-only
+
+diff -r d09ab1967dab -r 44516c9708ae plain
+--- a/plain Thu Jan 01 00:00:10 1970 +0000
++++ b/plain Thu Jan 01 00:00:11 1970 +0000
+@@ -7,4 +7,4 @@ 7
+ 7
+ 8
+ 9
+-10
++10.new
+
+% record beginning
+diff --git a/plain b/plain
+1 hunks, 3 lines changed
+record changes to 'plain'? [y]es [n]o @@ -1,6 +1,3 @@ 2
+-2
+-2
+-3
+ 4
+ 5
+ 6
+record this change to 'plain'? [y]es [n]o
+changeset: 13:3ebbace64a8d
+tag: tip
+user: test
+date: Thu Jan 01 00:00:12 1970 +0000
+summary: begin-only
+
+diff -r 44516c9708ae -r 3ebbace64a8d plain
+--- a/plain Thu Jan 01 00:00:11 1970 +0000
++++ b/plain Thu Jan 01 00:00:12 1970 +0000
+@@ -1,6 +1,3 @@ 2
+-2
+-2
+-3
+ 4
+ 5
+ 6
+
+% add to beginning, trim from end
+% record end
+diff --git a/plain b/plain
+2 hunks, 4 lines changed
+record changes to 'plain'? [y]es [n]o @@ -1,6 +1,9 @@ 4
++1
++2
++3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+record this change to 'plain'? [y]es [n]o @@ -1,7 +4,6 @@
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+-10.new
+record this change to 'plain'? [y]es [n]o % add to beginning, middle, end
+% record beginning, middle
+diff --git a/plain b/plain
+3 hunks, 7 lines changed
+record changes to 'plain'? [y]es [n]o @@ -1,2 +1,5 @@ 4
++1
++2
++3
+ 4
+ 5
+record this change to 'plain'? [y]es [n]o @@ -1,6 +4,8 @@
+ 4
+ 5
++5.new
++5.reallynew
+ 6
+ 7
+ 8
+ 9
+record this change to 'plain'? [y]es [n]o @@ -3,4 +8,6 @@
+ 6
+ 7
+ 8
+ 9
++10
++11
+record this change to 'plain'? [y]es [n]o
+changeset: 15:c1c639d8b268
+tag: tip
+user: test
+date: Thu Jan 01 00:00:14 1970 +0000
+summary: middle-only
+
+diff -r efc0dad7bd9f -r c1c639d8b268 plain
+--- a/plain Thu Jan 01 00:00:13 1970 +0000
++++ b/plain Thu Jan 01 00:00:14 1970 +0000
+@@ -1,5 +1,10 @@ 4
++1
++2
++3
+ 4
+ 5
++5.new
++5.reallynew
+ 6
+ 7
+ 8
+
+% record end
+diff --git a/plain b/plain
+1 hunks, 2 lines changed
+record changes to 'plain'? [y]es [n]o @@ -9,3 +9,5 @@ 7
+ 7
+ 8
+ 9
++10
++11
+record this change to 'plain'? [y]es [n]o
+changeset: 16:80b74bbc7808
+tag: tip
+user: test
+date: Thu Jan 01 00:00:15 1970 +0000
+summary: end-only
+
+diff -r c1c639d8b268 -r 80b74bbc7808 plain
+--- a/plain Thu Jan 01 00:00:14 1970 +0000
++++ b/plain Thu Jan 01 00:00:15 1970 +0000
+@@ -9,3 +9,5 @@ 7
+ 7
+ 8
+ 9
++10
++11
+
+adding subdir/a
+diff --git a/subdir/a b/subdir/a
+1 hunks, 1 lines changed
+record changes to 'subdir/a'? [y]es [n]o @@ -1,1 +1,2 @@ a
+ a
++a
+record this change to 'subdir/a'? [y]es [n]o
+changeset: 18:33ff5c4fb017
+tag: tip
+user: test
+date: Thu Jan 01 00:00:16 1970 +0000
+summary: subdir-change
+
+diff -r aecf2b2ea83c -r 33ff5c4fb017 subdir/a
+--- a/subdir/a Thu Jan 01 00:00:16 1970 +0000
++++ b/subdir/a Thu Jan 01 00:00:16 1970 +0000
+@@ -1,1 +1,2 @@ a
+ a
++a
+
--- a/tests/test-symlinks Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-symlinks Wed Aug 08 23:00:01 2007 +0200
@@ -72,3 +72,13 @@
echo '2. clone it'
cd ..
hg clone test testclone
+
+echo '# git symlink diff'
+cd testclone
+hg diff --git -r null:tip
+hg export --git tip > ../sl.diff
+echo '# import git symlink diff'
+hg rm a/b/c/demo
+hg commit -m'remove link'
+hg import ../sl.diff
+hg diff --git -r 1:tip
--- a/tests/test-symlinks.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-symlinks.out Wed Aug 08 23:00:01 2007 +0200
@@ -20,3 +20,20 @@
adding a/b/c/demo
2. clone it
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+# git symlink diff
+diff --git a/a/b/c/demo b/a/b/c/demo
+new file mode 120000
+--- /dev/null
++++ b/a/b/c/demo
+@@ -0,0 +1,1 @@
++/path/to/symlink/source
+\ No newline at end of file
+# import git symlink diff
+applying ../sl.diff
+diff --git a/a/b/c/demo b/a/b/c/demo
+new file mode 120000
+--- /dev/null
++++ b/a/b/c/demo
+@@ -0,0 +1,1 @@
++/path/to/symlink/source
+\ No newline at end of file
--- a/tests/test-transplant.out Wed Aug 08 22:47:30 2007 +0200
+++ b/tests/test-transplant.out Wed Aug 08 23:00:01 2007 +0200
@@ -101,17 +101,17 @@
adding bar
2 files updated, 0 files merged, 2 files removed, 0 files unresolved
applying a1e30dd1b8e7
-foo
-Hunk #1 FAILED at 1.
+patching file foo
+Hunk #1 FAILED at 0
1 out of 1 hunk FAILED -- saving rejects to file foo.rej
-patch command failed: exited with status 1
+patch failed to apply
abort: Fix up the merge and run hg transplant --continue
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
applying a1e30dd1b8e7
-foo
-Hunk #1 FAILED at 1.
+patching file foo
+Hunk #1 FAILED at 0
1 out of 1 hunk FAILED -- saving rejects to file foo.rej
-patch command failed: exited with status 1
+patch failed to apply
abort: Fix up the merge and run hg transplant --continue
a1e30dd1b8e7 transplanted as f1563cf27039
skipping already applied revision 1:a1e30dd1b8e7