changeset 6042:2da5b19a6460

Merge with crew
author Bryan O'Sullivan <bos@serpentine.com>
date Wed, 06 Feb 2008 19:57:52 -0800
parents dd714452c26e dd3267698d84
children 6283316bcfd4
files doc/hgmerge.1.txt doc/ja/hgmerge.1.ja.txt hgext/hbisect.py hgext/mq.py hgmerge mercurial/commands.py mercurial/patch.py templates/raw/header.tmpl tests/test-archive.out tests/test-help.out tests/test-notfound tests/test-notfound.out tests/test-ro-message tests/test-ro-message.out
diffstat 446 files changed, 22458 insertions(+), 6262 deletions(-)
--- a/.hgignore	Thu Jul 26 07:56:27 2007 -0400
+++ b/.hgignore	Wed Feb 06 19:57:52 2008 -0800
@@ -4,6 +4,7 @@
 *.orig
 *.rej
 *~
+*.mergebackup
 *.o
 *.so
 *.pyc
@@ -21,8 +22,10 @@
 MANIFEST
 patches
 mercurial/__version__.py
+Output/Mercurial-*.exe
 .DS_Store
+tags
+cscope.*
 
 syntax: regexp
 ^\.pc/
-Output/Mercurial-[0-9.]*.exe
--- a/.hgsigs	Thu Jul 26 07:56:27 2007 -0400
+++ b/.hgsigs	Wed Feb 06 19:57:52 2008 -0800
@@ -3,3 +3,4 @@
 36a957364b1b89c150f2d0e60a99befe0ee08bd3 0 iD8DBQBFfL2QywK+sNU5EO8RAjYFAKCoGlaWRTeMsjdmxAjUYx6diZxOBwCfY6IpBYsKvPTwB3oktnPt5Rmrlys=
 27230c29bfec36d5540fbe1c976810aefecfd1d2 0 iD8DBQBFheweywK+sNU5EO8RAt7VAKCrqJQWT2/uo2RWf0ZI4bLp6v82jACgjrMdsaTbxRsypcmEsdPhlG6/8F4=
 fb4b6d5fe100b0886f8bc3d6731ec0e5ed5c4694 0 iD8DBQBGgHicywK+sNU5EO8RAgNxAJ0VG8ixAaeudx4sZbhngI1syu49HQCeNUJQfWBgA8bkJ2pvsFpNxwYaX3I=
+23889160905a1b09fffe1c07378e9fc1827606eb 0 iD8DBQBHGTzoywK+sNU5EO8RAr/UAJ0Y8s4jQtzgS+G9vM8z6CWBThZ8fwCcCT5XDj2XwxKkz/0s6UELwjsO3LU=
--- a/.hgtags	Thu Jul 26 07:56:27 2007 -0400
+++ b/.hgtags	Wed Feb 06 19:57:52 2008 -0800
@@ -15,3 +15,4 @@
 36a957364b1b89c150f2d0e60a99befe0ee08bd3 0.9.2
 27230c29bfec36d5540fbe1c976810aefecfd1d2 0.9.3
 fb4b6d5fe100b0886f8bc3d6731ec0e5ed5c4694 0.9.4
+23889160905a1b09fffe1c07378e9fc1827606eb 0.9.5
--- a/CONTRIBUTORS	Thu Jul 26 07:56:27 2007 -0400
+++ b/CONTRIBUTORS	Wed Feb 06 19:57:52 2008 -0800
@@ -1,4 +1,7 @@
-Andrea Arcangeli <andrea at suse.de>
+[This file is here for historical purposes, all recent contributors
+should appear in the changelog directly]
+
+Andrea Arcangeli <andrea at suse.de>
 Thomas Arendsen Hein <thomas at intevation.de>
 Goffredo Baroncelli <kreijack at libero.it>
 Muli Ben-Yehuda <mulix at mulix.org>
@@ -36,5 +39,3 @@
 Rafael Villar Burke <pachi at mmn-arquitectos.com>
 Tristan Wibberley <tristan at wibberley.org>
 Mark Williamson <mark.williamson at cl.cam.ac.uk>
-
-If you are a contributor and don't see your name here, please let me know.
--- a/contrib/bash_completion	Thu Jul 26 07:56:27 2007 -0400
+++ b/contrib/bash_completion	Wed Feb 06 19:57:52 2008 -0800
@@ -28,7 +28,7 @@
 #              cat ~/.patchbomb-$1
 #          fi
 #      }
-# 
+#
 #
 # Writing completion functions for additional commands:
 #
@@ -50,7 +50,7 @@
 #                  an argument (e.g. '--cwd|-R|--repository')
 # - $canonical - 1 if we canonicalized $cmd before calling the function
 #                0 otherwise
-# 
+#
 
 shopt -s extglob
 
@@ -305,6 +305,15 @@
     _hg_ext_mq_patchlist qunapplied
 }
 
+_hg_cmd_qgoto()
+{
+    if [[ "$prev" = @(-n|--name) ]]; then
+	_hg_ext_mq_queues
+	return
+    fi
+    _hg_ext_mq_patchlist qseries
+}
+
 _hg_cmd_qdelete()
 {
     local qcmd=qunapplied
@@ -425,7 +434,7 @@
     done
 
     if [ -z "$subcmd" ] || [ $COMP_CWORD -eq $i ] || [ "$subcmd" = help ]; then
-	COMPREPLY=(${COMPREPLY[@]:-} 
+	COMPREPLY=(${COMPREPLY[@]:-}
 		   $(compgen -W 'bad good help init next reset' -- "$cur"))
 	return
     fi
@@ -445,7 +454,7 @@
 {
     case "$prev" in
 	-c|--cc|-t|--to|-f|--from|--bcc)
-	    # we need an e-mail address. let the user provide a function 
+	    # we need an e-mail address. let the user provide a function
 	    # to get them
 	    if [ "$(type -t _hg_emails)" = function ]; then
 		local arg=to
--- a/contrib/churn.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/contrib/churn.py	Wed Feb 06 19:57:52 2008 -0800
@@ -12,7 +12,7 @@
 # <alias email> <actual email>
 
 from mercurial.i18n import gettext as _
-from mercurial import hg, mdiff, cmdutil, ui, util, templater, node
+from mercurial import hg, mdiff, cmdutil, ui, util, templatefilters, node
 import os, sys
 
 def get_tty_width():
@@ -22,20 +22,16 @@
         except ValueError:
             pass
     try:
-        import termios, fcntl, struct
-        buf = 'abcd'
+        import termios, array, fcntl
         for dev in (sys.stdout, sys.stdin):
             try:
-                if buf != 'abcd':
-                    break
                 fd = dev.fileno()
                 if not os.isatty(fd):
                     continue
-                buf = fcntl.ioctl(fd, termios.TIOCGWINSZ, buf)
+                arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
+                return array.array('h', arri)[1]
             except ValueError:
                 pass
-        if buf != 'abcd':
-           return struct.unpack('hh', buf)[1]
     except ImportError:
         pass
     return 80
@@ -47,7 +43,7 @@
         to = mmap1 and repo.file(f).read(mmap1[f]) or None
         tn = mmap2 and repo.file(f).read(mmap2[f]) or None
 
-        diff = mdiff.unidiff(to, "", tn, "", f).split("\n")
+        diff = mdiff.unidiff(to, "", tn, "", f, f).split("\n")
 
         for line in diff:
             if not line:
@@ -73,7 +69,7 @@
     modified, added, removed, deleted, unknown = changes
 
     who = repo.changelog.read(node2)[1]
-    who = templater.email(who) # get the email of the person
+    who = util.email(who) # get the email of the person
 
     mmap1 = repo.manifest.read(repo.changelog.read(node1)[0])
     mmap2 = repo.manifest.read(repo.changelog.read(node2)[0])
@@ -118,23 +114,24 @@
         who, lines = __gather(ui, repo, node1, node2)
 
         # remap the owner if possible
-        if amap.has_key(who):
+        if who in amap:
             ui.note("using '%s' alias for '%s'\n" % (amap[who], who))
             who = amap[who]
 
-        if not stats.has_key(who):
+        if not who in stats:
             stats[who] = 0
         stats[who] += lines
 
         ui.note("rev %d: %d lines by %s\n" % (rev, lines, who))
 
         if progress:
+            nr_revs = max(nr_revs, 1)
             if int(100.0*(cur_rev - 1)/nr_revs) < int(100.0*cur_rev/nr_revs):
-                ui.write("%d%%.." % (int(100.0*cur_rev/nr_revs),))
+                ui.write("\rGenerating stats: %d%%" % (int(100.0*cur_rev/nr_revs),))
                 sys.stdout.flush()
 
     if progress:
-        ui.write("done\n")
+        ui.write("\r")
         sys.stdout.flush()
 
     return stats
@@ -148,6 +145,7 @@
         return s[0:l]
 
     def graph(n, maximum, width, char):
+        maximum = max(1, maximum)
         n = int(n * width / float(maximum))
 
         return char * (n)
@@ -182,6 +180,8 @@
     ordered = stats.items()
     ordered.sort(lambda x, y: cmp(y[1], x[1]))
 
+    if not ordered:
+        return
     maximum = ordered[0][1]
 
     width = get_tty_width()
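The get_tty_width() hunk above swaps the struct-based terminal probe for an array-based one. As a self-contained sketch of that ioctl technique (the COLUMNS environment check that precedes it in churn.py is omitted, and the helper name here is hypothetical): TIOCGWINSZ fills a struct winsize of four shorts, and index 1 is the column count.

    import array, fcntl, os, sys, termios

    def tty_width(default=80):
        # query each standard stream until one turns out to be a terminal
        for dev in (sys.stdout, sys.stdin):
            try:
                fd = dev.fileno()
                if not os.isatty(fd):
                    continue
                # TIOCGWINSZ writes rows, cols, xpixel, ypixel as shorts
                buf = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
                return array.array('h', buf)[1]
            except (ValueError, IOError):
                pass
        return default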
--- a/contrib/darcs2hg.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/contrib/darcs2hg.py	Wed Feb 06 19:57:52 2008 -0800
@@ -3,18 +3,22 @@
 # vim: tw=80 ts=4 sw=4 noet
 # -----------------------------------------------------------------------------
 # Project   : Basic Darcs to Mercurial conversion script
+#
+# *** DEPRECATED. Use the convert extension instead. This script will
+# *** be removed soon.
+#
 # -----------------------------------------------------------------------------
 # Authors   : Sebastien Pierre                           <sebastien@xprima.com>
 #             TK Soh                                      <teekaysoh@gmail.com>
 # -----------------------------------------------------------------------------
 # Creation  : 24-May-2006
-# Last mod  : 05-Jun-2006
 # -----------------------------------------------------------------------------
 
 import os, sys
 import tempfile
 import xml.dom.minidom as xml_dom
 from time import strptime, mktime
+import re
 
 DARCS_REPO = None
 HG_REPO    = None
@@ -93,11 +97,50 @@
 def darcs_pull(hg_repo, darcs_repo, chash):
 	old_tip = darcs_tip(darcs_repo)
 	res     = cmd("darcs pull \"%s\" --all --match=\"hash %s\"" % (darcs_repo, chash), hg_repo)
+	if re.search('^We have conflicts in the following files:$', res, re.MULTILINE):
+		print "Trying to revert files to work around conflict..."
+		rev_res = cmd ("darcs revert --all", hg_repo)
+		print rev_res
 	print res
 	new_tip = darcs_tip(darcs_repo)
 	if not new_tip != old_tip + 1:
 		error("Darcs pull did not work as expected: " + res)
 
+def darcs_changes_summary(darcs_repo, chash):
+	"""Gets the changes from the darcs summary. This returns the chronological
+	list of changes as (change_type, args). Eg. ('add_file', 'foo.txt') or
+	('move', ['foo.txt','bar.txt'])."""
+	change = cmd("darcs changes --summary --xml-output --match=\"hash %s\"" % (chash), darcs_repo)
+	doc = xml_dom.parseString(change)
+	for patch_node in doc.childNodes[0].childNodes:
+		summary_nodes = filter(lambda n: n.nodeName == "summary" and n.nodeType == n.ELEMENT_NODE, patch_node.childNodes)
+		for summary_node in summary_nodes:
+			change_nodes = filter(lambda n: n.nodeType == n.ELEMENT_NODE, summary_node.childNodes)
+			if len(change_nodes) == 0:
+				name = filter(lambda n: n.nodeName == "name", patch_node.childNodes)
+				if not name:
+					error("Darcs patch has an empty summary node and no name: " + patch_node.toxml())
+				name = name[0].childNodes[0].data.strip()
+				(tag, sub_count) = re.subn('^TAG ', '', name, 1)
+				if sub_count != 1:
+					error("Darcs patch has an empty summary node but doesn't look like a tag: " + patch_node.toxml());
+			for change_node in change_nodes:
+				change = change_node.nodeName
+				if change == 'modify_file':
+					yield change, change_node.childNodes[0].data.strip()
+				elif change == 'add_file':
+					yield change, change_node.childNodes[0].data.strip()
+				elif change == 'remove_file':
+					yield change, change_node.childNodes[0].data.strip()
+				elif change == 'add_directory':
+					yield change, change_node.childNodes[0].data.strip()
+				elif change == 'remove_directory':
+					yield change, change_node.childNodes[0].data.strip()
+				elif change == 'move':
+					yield change, (change_node.getAttribute('from'), change_node.getAttribute('to'))
+				else:
+					error('Problem parsing summary xml: Unexpected element: ' + change_node.toxml())
+
 # ------------------------------------------------------------------------------
 #
 # Mercurial interface
@@ -127,6 +170,36 @@
 	tip = tip.split("\n")[0].split(":")[1].strip()
 	return int(tip)
 
+def hg_rename( hg_repo, from_file, to_file ):
+	cmd("hg rename --after \"%s\" \"%s\"" % (from_file, to_file), hg_repo);
+	
+def hg_tag ( hg_repo, text, author, date ):
+	old_tip = hg_tip(hg_repo)
+	res = cmd("hg tag -u \"%s\" -d \"%s 0\" \"%s\""	 % (author, date, text), hg_repo)
+	new_tip = hg_tip(hg_repo)
+	if not new_tip == old_tip + 1:
+		error("Mercurial tag did not work as expected: " + res)
+
+def hg_handle_change( hg_repo, author, date, change, arg ):
+	"""Processes a change event as output by darcs_changes_summary. These
+	consist of file move/rename/add/delete commands."""
+	if change == 'modify_file':
+		pass
+	elif change == 'add_file':
+		pass
+	elif change =='remove_file':
+		pass
+	elif change == 'add_directory':
+		pass
+	elif change == 'remove_directory':
+		pass
+	elif change == 'move':
+		hg_rename(hg_repo, arg[0], arg[1])
+	elif change == 'tag':
+		hg_tag(hg_repo, arg, author, date)
+	else:
+		error('Unknown change type ' + change + ': ' + arg)
+
 # ------------------------------------------------------------------------------
 #
 # Main
@@ -147,6 +220,7 @@
 	else:
 		print USAGE
 		sys.exit(-1)
+	print 'This command is deprecated.  Use the convert extension instead.'
 	# Initializes the target repo
 	if not os.path.isdir(darcs_repo + "/_darcs"):
 		print "No darcs directory found at: " + darcs_repo
@@ -167,11 +241,13 @@
 			print "(skipping)"
 		else:
 			text = summary + "\n" + description
-			darcs_pull(hg_repo, darcs_repo, chash)
 			# The commit hash has a date like 20021020201112
 			# --------------------------------YYYYMMDDHHMMSS
 			date = chash.split("-")[0]
 			epoch = int(mktime(strptime(date, '%Y%m%d%H%M%S')))
+			darcs_pull(hg_repo, darcs_repo, chash)
+			for change, arg in darcs_changes_summary(darcs_repo, chash):
+				hg_handle_change(hg_repo, author, epoch, change, arg)
 			hg_commit(hg_repo, text, author, epoch)
 		change_number += 1
 	print "Darcs repository (_darcs) was not deleted. You can keep or remove it."
--- a/contrib/hg-ssh	Thu Jul 26 07:56:27 2007 -0400
+++ b/contrib/hg-ssh	Wed Feb 06 19:57:52 2008 -0800
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2005, 2006 by Intevation GmbH <intevation@intevation.de>
+# Copyright 2005-2007 by Intevation GmbH <intevation@intevation.de>
 # Author(s):
 # Thomas Arendsen Hein <thomas@intevation.de>
 #
@@ -25,7 +25,10 @@
 command="cd repos && hg-ssh user/thomas/* projects/{mercurial,foo}"
 """
 
-from mercurial import commands
+# enable importing on demand to reduce startup time
+from mercurial import demandimport; demandimport.enable()
+
+from mercurial import dispatch
 
 import sys, os
 
@@ -38,7 +41,7 @@
     path = orig_cmd[6:-14]
     repo = os.path.normpath(os.path.join(cwd, os.path.expanduser(path)))
     if repo in allowed_paths:
-        commands.dispatch(['-R', repo, 'serve', '--stdio'])
+        dispatch.dispatch(['-R', repo, 'serve', '--stdio'])
     else:
         sys.stderr.write("Illegal repository %r\n" % repo)
         sys.exit(-1)
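The two hunks above only touch the import and the dispatch call. For context, here is a sketch of the validation pattern the script builds around them, reconstructed from the surrounding context lines and the docstring's authorized_keys example; the SSH_ORIGINAL_COMMAND handling and the allowed-path construction are not shown in this diff and are assumptions:

    import os, sys
    from mercurial import demandimport; demandimport.enable()
    from mercurial import dispatch

    cwd = os.getcwd()
    # repository globs come from the command= line in authorized_keys
    allowed_paths = [os.path.normpath(os.path.join(cwd, os.path.expanduser(p)))
                     for p in sys.argv[1:]]
    # sshd exposes the client's requested command when a forced command is set
    orig_cmd = os.environ.get('SSH_ORIGINAL_COMMAND', '?')
    if orig_cmd.startswith('hg -R ') and orig_cmd.endswith(' serve --stdio'):
        path = orig_cmd[6:-14]   # strip 'hg -R ' and ' serve --stdio'
        repo = os.path.normpath(os.path.join(cwd, os.path.expanduser(path)))
        if repo in allowed_paths:
            dispatch.dispatch(['-R', repo, 'serve', '--stdio'])
        else:
            sys.stderr.write("Illegal repository %r\n" % repo)
            sys.exit(-1)
    else:
        sys.stderr.write("Illegal command %r\n" % orig_cmd)
        sys.exit(-1)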
--- a/contrib/hgdiff	Thu Jul 26 07:56:27 2007 -0400
+++ b/contrib/hgdiff	Wed Feb 06 19:57:52 2008 -0800
@@ -15,7 +15,7 @@
 parser.add_option('-x', '--count', default=1)
 parser.add_option('-c', '--context', type="int", default=3)
 parser.add_option('-p', '--show-c-function', action="store_true", default=False)
-parser.add_option('-w', '--ignore-all-space', action="store_true", 
+parser.add_option('-w', '--ignore-all-space', action="store_true",
                   default=False)
 
 (options, args) = parser.parse_args()
--- a/contrib/hgk	Thu Jul 26 07:56:27 2007 -0400
+++ b/contrib/hgk	Wed Feb 06 19:57:52 2008 -0800
@@ -4,6 +4,8 @@
 # This program is free software; it may be used, copied, modified
 # and distributed under the terms of the GNU General Public Licence,
 # either version 2, or (at your option) any later version.
+#
+# See hgk.py for extension usage and configuration.
 
 
 # Modified version of Tip 171:
@@ -32,7 +34,7 @@
     # if we are outside the app, try and scroll the focus widget
     if {![winfo exists $w]} { catch {set w [focus]} }
     if {[winfo exists $w]} {
-        
+
         if {[bind $w $evt] ne ""} {
             # Awkward ... this widget has a MouseWheel binding, but to
             # trigger successfully in it, we must give it focus.
@@ -70,8 +72,17 @@
 bind all <MouseWheel> [list ::tk::MouseWheel %W %X %Y %D 0]
 
 # end of win32 section
-} 
-
+}
+
+
+# Unify right mouse button handling.
+# See "mouse buttons on macintosh" thread on comp.lang.tcl
+if {[tk windowingsystem] eq "aqua"} {
+    event add <<B3>> <Control-ButtonPress-1>
+    event add <<B3>> <Button-2>
+} else {
+    event add <<B3>> <Button-3>
+}
 
 proc gitdir {} {
     global env
@@ -263,6 +274,7 @@
     set comname {}
     set comdate {}
     set rev {}
+    set branch {}
     if {![info exists nchildren($id)]} {
 	set children($id) {}
 	set nchildren($id) 0
@@ -299,6 +311,8 @@
 		    set comname [join [lrange $line 1 [expr {$x - 1}]]]
 		} elseif {$tag == "revision"} {
 		    set rev [lindex $line 1]
+        } elseif {$tag == "branch"} {
+		    set branch [join [lrange $line 1 end]]
 		}
 	    }
 	} else {
@@ -323,11 +337,14 @@
 	set comdate [clock format $comdate -format "%Y-%m-%d %H:%M:%S"]
     }
     set commitinfo($id) [list $headline $auname $audate \
-			     $comname $comdate $comment $rev]
+			     $comname $comdate $comment $rev $branch]
 }
 
 proc readrefs {} {
-    global tagids idtags headids idheads tagcontents env
+    global tagids idtags headids idheads tagcontents env curid
+    
+    set curid [exec $env(HG) --config ui.report_untrusted=false id]
+    regexp -- {[[:xdigit:]]+} $curid curid
 
     set tags [exec $env(HG) --config ui.report_untrusted=false tags]
     regsub -all "\r\n" $tags "\n" tags
@@ -390,6 +407,7 @@
     global entries sha1entry sha1string sha1but
     global maincursor textcursor curtextcursor
     global rowctxmenu gaudydiff mergemax
+    global hgvdiff
 
     menu .bar
     .bar add cascade -label "File" -menu .bar.file
@@ -593,6 +611,12 @@
     $rowctxmenu add command -label "Make patch" -command mkpatch
     $rowctxmenu add command -label "Create tag" -command mktag
     $rowctxmenu add command -label "Write commit to file" -command writecommit
+    if { $hgvdiff ne "" } {
+	$rowctxmenu add command -label "Visual diff with parent" \
+	    -command {vdiff 1}
+	$rowctxmenu add command -label "Visual diff with selected" \
+	    -command {vdiff 0}
+    }
 }
 
 # when we make a key binding for the toplevel, make sure
@@ -623,13 +647,14 @@
 proc savestuff {w} {
     global canv canv2 canv3 ctext cflist mainfont textfont
     global stuffsaved findmergefiles gaudydiff maxgraphpct
-    global maxwidth
+    global maxwidth authorcolors curidfont
 
     if {$stuffsaved} return
     if {![winfo viewable .]} return
     catch {
-	set f [open "~/.gitk-new" w]
+	set f [open "~/.hgk-new" w]
 	puts $f [list set mainfont $mainfont]
+	puts $f [list set curidfont $curidfont]
 	puts $f [list set textfont $textfont]
 	puts $f [list set findmergefiles $findmergefiles]
 	puts $f [list set gaudydiff $gaudydiff]
@@ -647,8 +672,25 @@
 	set wid [expr {([winfo width $cflist] - 11) \
 			   / [font measure [$cflist cget -font] "0"]}]
 	puts $f "set geometry(cflistw) $wid"
+	puts $f "#"
+	puts $f "# authorcolors format:"
+	puts $f "#"
+	puts $f "# zero or more sublists of"
+	puts $f "#"
+	puts $f "#    { regex color }"
+	puts $f "#"
+	puts $f "# followed by a list of colors"
+	puts $f "#"
+	puts $f "# If the commit author matches a regex in a sublist,"
+	puts $f "# the commit will be colored by that color"
+	puts $f "# otherwise the next unused entry from the list of colors"
+	puts $f "# will be assigned to this commit and also all other commits"
+	puts $f "# of the same author.  When the list of colors is exhausted,"
+	puts $f "# the last entry will be reused."
+	puts $f "#"
+	puts $f "set authorcolors {$authorcolors}"
 	close $f
-	file rename -force "~/.gitk-new" "~/.gitk"
+	file rename -force "~/.hgk-new" "~/.hgk"
     }
     set stuffsaved 1
 }
@@ -739,6 +781,35 @@
     pack $w.ok -side bottom
 }
 
+set aunextcolor 0
+proc assignauthorcolor {name} {
+    global authorcolors aucolormap aunextcolor
+    if [info exists aucolormap($name)] return
+
+    set randomcolors {black}
+    for {set i 0} {$i < [llength $authorcolors]} {incr i} {
+	set col [lindex $authorcolors $i]
+	if {[llength $col] > 1} {
+	    set re [lindex $col 0]
+	    set c [lindex $col 1]
+	    if {[regexp -- $re $name]} {
+		set aucolormap($name) $c
+		return
+	    }
+	} else {
+	    set randomcolors [lrange $authorcolors $i end]
+	    break
+	}
+    }
+
+    set ncolors [llength $randomcolors]
+    set c [lindex $randomcolors $aunextcolor]
+    if {[incr aunextcolor] >= $ncolors} {
+	incr aunextcolor -1
+    }
+    set aucolormap($name) $c
+}
+
 proc assigncolor {id} {
     global commitinfo colormap commcolors colors nextcolor
     global parents nparents children nchildren
@@ -876,6 +947,7 @@
     global lineno lthickness mainline mainlinearrow sidelines
     global commitlisted rowtextx idpos lastuse displist
     global oldnlines olddlevel olddisplist
+    global aucolormap curid curidfont
 
     incr numcommits
     incr lineno
@@ -934,14 +1006,25 @@
     }
     set headline [lindex $commitinfo($id) 0]
     set name [lindex $commitinfo($id) 1]
+    assignauthorcolor $name
+    set fg $aucolormap($name)
+    if {$id == $curid} {
+	set fn $curidfont
+    } else {
+	set fn $mainfont
+    }
+
     set date [lindex $commitinfo($id) 2]
     set linehtag($lineno) [$canv create text $xt $y1 -anchor w \
-			       -text $headline -font $mainfont ]
-    $canv bind $linehtag($lineno) <Button-3> "rowmenu %X %Y $id"
+			       -text $headline -font $fn \
+			       -fill $fg]
+    $canv bind $linehtag($lineno) <<B3>> "rowmenu %X %Y $id"
     set linentag($lineno) [$canv2 create text 3 $y1 -anchor w \
-			       -text $name -font $namefont]
+			       -text $name -font $namefont \
+			       -fill $fg]
     set linedtag($lineno) [$canv3 create text 3 $y1 -anchor w \
-			       -text $date -font $mainfont]
+			       -text $date -font $mainfont \
+			       -fill $fg]
 
     set olddlevel $level
     set olddisplist $displist
@@ -2206,6 +2289,9 @@
     $ctext mark gravity fmark.0 left
     set info $commitinfo($id)
     $ctext insert end "Revision: [lindex $info 6]\n"
+    if {[llength [lindex $info 7]] > 0} {
+        $ctext insert end "Branch: [lindex $info 7]\n"
+    }
     $ctext insert end "Author: [lindex $info 1]  [lindex $info 2]\n"
     $ctext insert end "Committer: [lindex $info 3]  [lindex $info 4]\n"
     if {[info exists idtags($id)]} {
@@ -3071,9 +3157,10 @@
 
 proc incrfont {inc} {
     global mainfont namefont textfont ctext canv phase
-    global stopped entries
+    global stopped entries curidfont
     unmarkmatches
     set mainfont [lreplace $mainfont 1 1 [expr {[lindex $mainfont 1] + $inc}]]
+    set curidfont [lreplace $curidfont 1 1 [expr {[lindex $curidfont 1] + $inc}]]
     set namefont [lreplace $namefont 1 1 [expr {[lindex $namefont 1] + $inc}]]
     set textfont [lreplace $textfont 1 1 [expr {[lindex $textfont 1] + $inc}]]
     setcoords
@@ -3364,7 +3451,7 @@
 }
 
 proc rowmenu {x y id} {
-    global rowctxmenu idline selectedline rowmenuid
+    global rowctxmenu idline selectedline rowmenuid hgvdiff
 
     if {![info exists selectedline] || $idline($id) eq $selectedline} {
 	set state disabled
@@ -3374,6 +3461,9 @@
     $rowctxmenu entryconfigure 0 -state $state
     $rowctxmenu entryconfigure 1 -state $state
     $rowctxmenu entryconfigure 2 -state $state
+    if { $hgvdiff ne "" } {
+	$rowctxmenu entryconfigure 6 -state $state
+    }
     set rowmenuid $id
     tk_popup $rowctxmenu $x $y
 }
@@ -3686,6 +3776,26 @@
     }
 }
 
+proc vdiff {withparent} {
+    global env rowmenuid selectedline lineid hgvdiff
+
+    if {![info exists rowmenuid]} return
+    set curid $rowmenuid
+
+    if {$withparent} {
+	set parents [exec $env(HG) --config ui.report_untrusted=false parents --rev $curid --template "{node}\n"]
+	set firstparent [lindex [split $parents "\n"] 0]
+	set otherid $firstparent
+    } else {
+	if {![info exists selectedline]} return
+	set otherid $lineid($selectedline)
+    }
+    set range "$otherid:$curid"
+    if {[catch {exec $env(HG) --config ui.report_untrusted=false $hgvdiff -r $range} err]} {
+        # Ignore errors, this is just visualization
+    }
+}
+
 proc showtag {tag isnew} {
     global ctext cflist tagcontents tagids linknum
 
@@ -3711,6 +3821,19 @@
     destroy .
 }
 
+proc getconfig {} {
+    global env
+
+    set lines [exec $env(HG) debug-config]
+    regsub -all "\r\n" $lines "\n" config
+    set config {}
+    foreach line [split $lines "\n"] {
+	regsub "^(k|v)=" $line "" line
+	lappend config $line
+    }
+    return $config
+}
+
 # defaults...
 set datemode 0
 set boldnames 0
@@ -3718,6 +3841,7 @@
 set wrcomcmd "\"\$HG\" --config ui.report_untrusted=false debug-diff-tree --stdin -p --pretty"
 
 set mainfont {Helvetica 9}
+set curidfont {}
 set textfont {Courier 9}
 set findmergefiles 0
 set gaudydiff 0
@@ -3725,8 +3849,15 @@
 set maxwidth 16
 
 set colors {green red blue magenta darkgrey brown orange}
-
-catch {source ~/.gitk}
+set authorcolors {
+    black blue deeppink mediumorchid blue burlywood4 goldenrod slateblue red2 navy dimgrey
+}
+
+catch {source ~/.hgk}
+
+if {$curidfont == ""} {  # initialize late based on current mainfont
+    set curidfont "$mainfont bold italic underline"
+}
 
 set namefont $mainfont
 if {$boldnames} {
@@ -3752,7 +3883,12 @@
 set redisplaying 0
 set stuffsaved 0
 set patchnum 0
+
+array set config [getconfig]
+set hgvdiff $config(vdiff)
 setcoords
 makewindow
 readrefs
+set hgroot [exec $env(HG) root]
+wm title . "hgk $hgroot"
 getcommits $revtreeargs
--- a/contrib/hgsh/hgsh.c	Thu Jul 26 07:56:27 2007 -0400
+++ b/contrib/hgsh/hgsh.c	Wed Feb 06 19:57:52 2008 -0800
@@ -249,7 +249,7 @@
     hg_serve,
 };
 
-    
+
 /*
  * attempt to verify that a directory is really a hg repo, by testing
  * for the existence of a subdirectory.
@@ -342,13 +342,13 @@
 
 	if (cmd != hg_init) {
 	    int valid;
-	    
+
 	    valid = validate_repo(repo_root, "data");
 
 	    if (valid == -1) {
 		goto badargs;
 	    }
-	    
+
 	    if (valid == 0) {
 		valid = validate_repo(repo_root, "store");
 
@@ -356,7 +356,7 @@
 		    goto badargs;
 		}
 	    }
-	    
+
 	    if (valid == 0) {
 		perror(repo);
 		exit(EX_DATAERR);
@@ -385,7 +385,7 @@
 	nargv[i++] = repo;
 	break;
     }
-    
+
     nargv[i] = NULL;
 
     if (debug) {
--- a/contrib/hgwebdir.fcgi	Thu Jul 26 07:56:27 2007 -0400
+++ b/contrib/hgwebdir.fcgi	Wed Feb 06 19:57:52 2008 -0800
@@ -2,14 +2,17 @@
 #
 # An example CGI script to export multiple hgweb repos, edit as necessary
 
+# adjust python path if not a system-wide install:
+#import sys
+#sys.path.insert(0, "/path/to/python/lib")
+
+# enable demandloading to reduce startup time
+from mercurial import demandimport; demandimport.enable()
+
 # send python tracebacks to the browser if an error occurs:
 import cgitb
 cgitb.enable()
 
-# adjust python path if not a system-wide install:
-#import sys
-#sys.path.insert(0, "/path/to/python/lib")
-
 # If you'd like to serve pages with UTF-8 instead of your default
 # locale charset, you can do so by uncommenting the following lines.
 # Note that this will cause your .hgrc files to be interpreted in
@@ -20,6 +23,7 @@
 
 from mercurial.hgweb.hgwebdir_mod import hgwebdir
 from mercurial.hgweb.request import wsgiapplication
+from mercurial import dispatch, ui
 from flup.server.fcgi import WSGIServer
 
 # The config file looks like this.  You can have paths to individual
@@ -41,7 +45,8 @@
 # Alternatively you can pass a list of ('virtual/path', '/real/path') tuples
 # or use a dictionary with entries like 'virtual/path': '/real/path'
 
-def make_web_app():
-    return hgwebdir("hgweb.config")
+def web_app(ui):
+    return lambda: hgwebdir("hgweb.config", ui)
 
-WSGIServer(wsgiapplication(make_web_app)).run()
+u = ui.ui(report_untrusted=False, interactive=False)
+dispatch.profiled(u, lambda: WSGIServer(wsgiapplication(web_app(u))).run())
--- a/contrib/macosx/Readme.html	Thu Jul 26 07:56:27 2007 -0400
+++ b/contrib/macosx/Readme.html	Wed Feb 06 19:57:52 2008 -0800
@@ -17,20 +17,11 @@
 <body>
 <p class="p1"><b>Before you install</b></p>
 <p class="p2"><br></p>
-<p class="p3">This is <i>not</i> a stand-alone version of Mercurial.</p>
-<p class="p2"><br></p>
-<p class="p3">To use it, you must have the appropriate version of Universal MacPython from <a href="http://www.python.org">www.python.org</a> installed.</p>
-<p class="p2"><br></p>
-<p class="p3">You can find more information and download MacPython from here:</p>
-<p class="p4"><span class="s1"><a href="http://www.python.org/download">http://www.python.org/download</a></span></p>
-<p class="p2"><br></p>
-<p class="p3">Or direct links to the latest version are:</p>
-<p class="p4"><span class="s1"><a href="http://www.python.org/ftp/python/2.5.1/python-2.5.1-macosx.dmg">Python 2.5.1 for Macintosh OS X</a></span></p>
-<p class="p4"><span class="s1"><a href="http://www.python.org/ftp/python/2.4.4/python-2.4.4-macosx2006-10-18.dmg">Python 2.4.4 for Macintosh OS X</a></span></p>
+<p class="p3">This is an OS X 10.5 version of Mercurial that depends on the default Python 2.5 installation.</p>
 <p class="p2"><br></p>
 <p class="p1"><b>After you install</b></p>
 <p class="p2"><br></p>
-<p class="p3">This package installs the <span class="s2">hg</span> executable in <span class="s2">/Library/Frameworks/Python.framework/Versions/Current/bin</span>. This directory may not be in your shell's search path. The MacPython installer will have created an entry in <span class="s2">.profile</span> for it but if your shell doesn't use <span class="s2">.profile</span> you'll need configure it yourself or create a symlink from a directory already in your path.</p>
+<p class="p3">This package installs the <span class="s2">hg</span> executable in <span class="s2">/usr/local/bin</span> and the Mercurial files in </span class="s2">/Library/Python/2.5/site-packages/mercurial.</span></p>
 <p class="p2"><br></p>
 <p class="p1"><b>Documentation</b></p>
 <p class="p2"><br></p>
--- a/contrib/mercurial.el	Thu Jul 26 07:56:27 2007 -0400
+++ b/contrib/mercurial.el	Wed Feb 06 19:57:52 2008 -0800
@@ -426,7 +426,7 @@
     (if (or (not default) current-prefix-arg)
         (string-to-number
          (eval (list* 'read-string
-                      (or prompt "") 
+                      (or prompt "")
                       (if default (cons (format "%d" default) nil) nil))))
       default)))
 
@@ -521,7 +521,7 @@
 	    (completing-read (format "Revision%s (%s): "
 				     (or prompt "")
 				     (or default "tip"))
-			     (map 'list 'cons revs revs)
+			     (mapcar (lambda (x) (cons x x)) revs)
 			     nil
 			     nil
 			     nil
@@ -565,7 +565,7 @@
 	  (when buf
 	    (set-buffer buf)
 	    (hg-mode-line-internal status parents)))))))
-  
+
 
 ;;; View mode bits.
 
@@ -588,7 +588,7 @@
   (setq hg-view-mode t)
   (setq truncate-lines t)
   (when file-name
-    (setq hg-view-file-name 
+    (setq hg-view-file-name
 	  (hg-abbrev-file-name file-name))))
 
 (defun hg-file-status (file)
@@ -666,7 +666,11 @@
 to have moved a little, but not really changed."
   (let ((point-context (hg-position-context (point)))
 	(mark-context (let ((mark (mark-marker)))
-			(and mark (hg-position-context mark)))))
+			(and mark
+			     ;; make sure active mark
+			     (marker-buffer mark)
+			     (marker-position mark)
+			     (hg-position-context mark)))))
     (list point-context mark-context)))
 
 (defun hg-find-context (ctx)
@@ -703,7 +707,7 @@
 						 (added . "a")
 						 (deleted . "!")
 						 (modified . "m"))))))))
-  
+
 (defun hg-mode-line (&optional force)
   "Update the modeline with the current status of a file.
 An update occurs if optional argument FORCE is non-nil,
@@ -1000,7 +1004,7 @@
         ;; none revision is specified explicitly
         (none (and (not rev1) (not rev2)))
         ;; only one revision is specified explicitly
-        (one (or (and (or (equal rev1 rev2) (not rev2)) rev1) 
+        (one (or (and (or (equal rev1 rev2) (not rev2)) rev1)
                  (and (not rev1) rev2)))
 	diff)
     (hg-view-output ((cond
@@ -1012,7 +1016,7 @@
 		       (format "Mercurial: Diff from rev %s to %s of %s"
 			       rev1 rev2 a-path))))
       (cond
-       (none 
+       (none
         (call-process (hg-binary) nil t nil "diff" path))
        (one
         (call-process (hg-binary) nil t nil "diff" "-r" one path))
@@ -1100,7 +1104,7 @@
         (limit (format "%d" (or log-limit hg-log-limit))))
     (hg-view-output ((if (equal r1 r2)
                          (format "Mercurial: Log of rev %s of %s" rev1 a-path)
-                       (format 
+                       (format
                         "Mercurial: at most %s log(s) from rev %s to %s of %s"
                         limit r1 r2 a-path)))
       (eval (list* 'call-process (hg-binary) nil t nil
@@ -1123,7 +1127,7 @@
   (interactive (list (hg-read-file-name " to log")
                      (hg-read-rev " to start with"
                                   "tip")
-                     (hg-read-rev " to end with" 
+                     (hg-read-rev " to end with"
 				  "0")
                      (hg-read-number "Output limited to: "
                                      hg-log-limit)))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/mergetools.hgrc	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,49 @@
+# Some default global settings for common merge tools
+
+[merge-tools]
+kdiff3.args=--auto -L1 base --L2 local --L3 other $base $local $other -o $output
+kdiff3.regkey=Software\KDiff3
+kdiff3.regappend=\kdiff3.exe
+kdiff3.fixeol=True
+kdiff3.gui=True
+
+gvimdiff.args=--nofork -d -g -O $local $other $base
+gvimdiff.regkey=Software\Vim\GVim
+gvimdiff.regname=path
+gvimdiff.priority=-9
+
+merge.checkconflicts=True
+merge.priority=-10
+
+gpyfm.gui=True
+
+meld.gui=True
+
+tkdiff.args=$local $other -a $base -o $output
+tkdiff.gui=True
+tkdiff.priority=-8
+
+xxdiff.args=--show-merged-pane --exit-with-merge-status --title1 local --title2 base --title3 other --merged-filename $output --merge $local $base $other
+xxdiff.gui=True
+xxdiff.priority=-8
+
+diffmerge.args=--nosplash --merge --title1=base --title2=local --title3=other $base $local $other
+diffmerge.gui=True
+
+p4merge.args=$base $local $other $output
+p4merge.regkey=Software\Perforce\Environment
+p4merge.regname=P4INSTROOT
+p4merge.regappend=\p4merge.exe
+p4merge.gui=True
+p4merge.priority=-8
+
+tortoisemerge.args=/base: $output /mine:$local /theirs:$other /merged:$output
+tortoisemerge.regkey=Software\TortoiseSVN
+tortoisemerge.gui=True
+
+ecmerge.args=$base $local $other --mode=merge3 --title0=base --title1=local --title2=other --to=$output
+ecmerge.regkey=Software\Elli\xc3\xa9 Computing\Merge
+ecmerge.gui=True
+
+filemerge.args=-left $other -right $local -ancestor $base -merge $output
+filemerge.gui=True
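These entries are only candidates: at merge time Mercurial picks among the tools that are actually installed, ranked by the priority keys above (higher wins). A user who wants a specific tool can still pin it in a personal hgrc; a minimal example, assuming kdiff3 and meld are installed (the [ui] merge setting is ordinary Mercurial configuration, not part of this file):

    [ui]
    # always use kdiff3 when it can be found, regardless of priorities
    merge = kdiff3

    [merge-tools]
    # or just promote meld above the defaults shipped here
    meld.priority = 100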
--- a/contrib/mq.el	Thu Jul 26 07:56:27 2007 -0400
+++ b/contrib/mq.el	Wed Feb 06 19:57:52 2008 -0800
@@ -18,6 +18,7 @@
 ;; C-l').  If not, write to the Free Software Foundation, Inc., 59
 ;; Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 
+(eval-when-compile (require 'cl))
 (require 'mercurial)
 
 
@@ -62,35 +63,42 @@
 (make-variable-buffer-local 'mq-prev-buffer)
 (put 'mq-prev-buffer 'permanent-local t)
 
+(defvar mq-top nil)
+(make-variable-buffer-local 'mq-top)
+(put 'mq-top 'permanent-local t)
 
 ;;; Global keymap.
 
-(defvar mq-global-map (make-sparse-keymap))
-(fset 'mq-global-map mq-global-map)
-(global-set-key mq-global-prefix 'mq-global-map)
-(define-key mq-global-map "." 'mq-push)
-(define-key mq-global-map ">" 'mq-push-all)
-(define-key mq-global-map "," 'mq-pop)
-(define-key mq-global-map "<" 'mq-pop-all)
-(define-key mq-global-map "=" 'mq-diff)
-(define-key mq-global-map "r" 'mq-refresh)
-(define-key mq-global-map "e" 'mq-refresh-edit)
-(define-key mq-global-map "i" 'mq-new)
-(define-key mq-global-map "n" 'mq-next)
-(define-key mq-global-map "o" 'mq-signoff)
-(define-key mq-global-map "p" 'mq-previous)
-(define-key mq-global-map "s" 'mq-edit-series)
-(define-key mq-global-map "t" 'mq-top)
+(defvar mq-global-map
+  (let ((map (make-sparse-keymap)))
+    (define-key map "." 'mq-push)
+    (define-key map ">" 'mq-push-all)
+    (define-key map "," 'mq-pop)
+    (define-key map "<" 'mq-pop-all)
+    (define-key map "=" 'mq-diff)
+    (define-key map "r" 'mq-refresh)
+    (define-key map "e" 'mq-refresh-edit)
+    (define-key map "i" 'mq-new)
+    (define-key map "n" 'mq-next)
+    (define-key map "o" 'mq-signoff)
+    (define-key map "p" 'mq-previous)
+    (define-key map "s" 'mq-edit-series)
+    (define-key map "t" 'mq-top)
+    map))
+
+(global-set-key mq-global-prefix mq-global-map)
 
 (add-minor-mode 'mq-mode 'mq-mode)
 
 
 ;;; Refresh edit mode keymap.
 
-(defvar mq-edit-mode-map (make-sparse-keymap))
-(define-key mq-edit-mode-map "\C-c\C-c" 'mq-edit-finish)
-(define-key mq-edit-mode-map "\C-c\C-k" 'mq-edit-kill)
-(define-key mq-edit-mode-map "\C-c\C-s" 'mq-signoff)
+(defvar mq-edit-mode-map
+  (let ((map (make-sparse-keymap)))
+    (define-key map "\C-c\C-c" 'mq-edit-finish)
+    (define-key map "\C-c\C-k" 'mq-edit-kill)
+    (define-key map "\C-c\C-s" 'mq-signoff)
+    map))
 
 
 ;;; Helper functions.
@@ -102,7 +110,7 @@
 		  (hg-chomp (hg-run0 (or source "qseries"))) "\n")))
     (when force
       (completing-read (format "Patch%s: " (or prompt ""))
-		       (map 'list 'cons patches patches)
+		       (mapcar (lambda (x) (cons x x)) patches)
 		       nil
 		       nil
 		       nil
@@ -131,7 +139,7 @@
     (let ((line (buffer-substring bol (point))))
       (when (> (length line) 0)
 	line))))
-  
+
 (defun mq-push (&optional patch)
   "Push patches until PATCH is reached.
 If PATCH is nil, push at most one patch."
@@ -166,7 +174,7 @@
       (if ok
 	  (message "Pushing... %s" last-line)
 	(error "Pushing... %s" last-line)))))
-  
+
 (defun mq-push-all ()
   "Push patches until all are applied."
   (interactive)
@@ -195,7 +203,7 @@
       (if ok
 	  (message "Popping... %s" last-line)
 	(error "Popping... %s" last-line)))))
-  
+
 (defun mq-pop-all ()
   "Push patches until none are applied."
   (interactive)
@@ -255,7 +263,7 @@
   (let ((buf mq-prev-buffer))
     (kill-buffer nil)
     (switch-to-buffer buf)))
-  
+
 (defun mq-edit-kill ()
   "Kill the edit currently being prepared."
   (interactive)
@@ -316,7 +324,7 @@
   (set-buffer-modified-p nil)
   (setq buffer-undo-list nil)
   (run-hooks 'text-mode-hook 'mq-edit-mode-hook))
-  
+
 (defun mq-refresh-edit ()
   "Refresh the topmost applied patch, editing the patch description."
   (interactive)
--- a/contrib/simplemerge	Thu Jul 26 07:56:27 2007 -0400
+++ b/contrib/simplemerge	Wed Feb 06 19:57:52 2008 -0800
@@ -1,503 +1,11 @@
 #!/usr/bin/env python
-# Copyright (C) 2004, 2005 Canonical Ltd
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
-
-
-# mbp: "you know that thing where cvs gives you conflict markers?"
-# s: "i hate that."
 
 from mercurial import demandimport
 demandimport.enable()
 
-from mercurial import util, mdiff, fancyopts
+import os, sys
 from mercurial.i18n import _
-
-
-class CantReprocessAndShowBase(Exception):
-    pass
-    
-
-def warn(message):
-    sys.stdout.flush()
-    sys.stderr.write(message)
-    sys.stderr.flush()
-
-
-def intersect(ra, rb):
-    """Given two ranges return the range where they intersect or None.
-
-    >>> intersect((0, 10), (0, 6))
-    (0, 6)
-    >>> intersect((0, 10), (5, 15))
-    (5, 10)
-    >>> intersect((0, 10), (10, 15))
-    >>> intersect((0, 9), (10, 15))
-    >>> intersect((0, 9), (7, 15))
-    (7, 9)
-    """
-    assert ra[0] <= ra[1]
-    assert rb[0] <= rb[1]
-    
-    sa = max(ra[0], rb[0])
-    sb = min(ra[1], rb[1])
-    if sa < sb:
-        return sa, sb
-    else:
-        return None
-
-
-def compare_range(a, astart, aend, b, bstart, bend):
-    """Compare a[astart:aend] == b[bstart:bend], without slicing.
-    """
-    if (aend-astart) != (bend-bstart):
-        return False
-    for ia, ib in zip(xrange(astart, aend), xrange(bstart, bend)):
-        if a[ia] != b[ib]:
-            return False
-    else:
-        return True
-        
-
-
-
-class Merge3Text(object):
-    """3-way merge of texts.
-
-    Given strings BASE, OTHER, THIS, tries to produce a combined text
-    incorporating the changes from both BASE->OTHER and BASE->THIS."""
-    def __init__(self, basetext, atext, btext, base=None, a=None, b=None):
-        self.basetext = basetext
-        self.atext = atext
-        self.btext = btext
-        if base is None:
-            base = mdiff.splitnewlines(basetext)
-        if a is None:
-            a = mdiff.splitnewlines(atext)
-        if b is None:
-            b = mdiff.splitnewlines(btext)
-        self.base = base
-        self.a = a
-        self.b = b
-
-
-
-    def merge_lines(self,
-                    name_a=None,
-                    name_b=None,
-                    name_base=None,
-                    start_marker='<<<<<<<',
-                    mid_marker='=======',
-                    end_marker='>>>>>>>',
-                    base_marker=None,
-                    reprocess=False):
-        """Return merge in cvs-like form.
-        """
-        self.conflicts = False
-        newline = '\n'
-        if len(self.a) > 0:
-            if self.a[0].endswith('\r\n'):
-                newline = '\r\n'
-            elif self.a[0].endswith('\r'):
-                newline = '\r'
-        if base_marker and reprocess:
-            raise CantReprocessAndShowBase()
-        if name_a:
-            start_marker = start_marker + ' ' + name_a
-        if name_b:
-            end_marker = end_marker + ' ' + name_b
-        if name_base and base_marker:
-            base_marker = base_marker + ' ' + name_base
-        merge_regions = self.merge_regions()
-        if reprocess is True:
-            merge_regions = self.reprocess_merge_regions(merge_regions)
-        for t in merge_regions:
-            what = t[0]
-            if what == 'unchanged':
-                for i in range(t[1], t[2]):
-                    yield self.base[i]
-            elif what == 'a' or what == 'same':
-                for i in range(t[1], t[2]):
-                    yield self.a[i]
-            elif what == 'b':
-                for i in range(t[1], t[2]):
-                    yield self.b[i]
-            elif what == 'conflict':
-                self.conflicts = True
-                yield start_marker + newline
-                for i in range(t[3], t[4]):
-                    yield self.a[i]
-                if base_marker is not None:
-                    yield base_marker + newline
-                    for i in range(t[1], t[2]):
-                        yield self.base[i]
-                yield mid_marker + newline
-                for i in range(t[5], t[6]):
-                    yield self.b[i]
-                yield end_marker + newline
-            else:
-                raise ValueError(what)
-        
-        
-
-
-
-    def merge_annotated(self):
-        """Return merge with conflicts, showing origin of lines.
-
-        Most useful for debugging merge.        
-        """
-        for t in self.merge_regions():
-            what = t[0]
-            if what == 'unchanged':
-                for i in range(t[1], t[2]):
-                    yield 'u | ' + self.base[i]
-            elif what == 'a' or what == 'same':
-                for i in range(t[1], t[2]):
-                    yield what[0] + ' | ' + self.a[i]
-            elif what == 'b':
-                for i in range(t[1], t[2]):
-                    yield 'b | ' + self.b[i]
-            elif what == 'conflict':
-                yield '<<<<\n'
-                for i in range(t[3], t[4]):
-                    yield 'A | ' + self.a[i]
-                yield '----\n'
-                for i in range(t[5], t[6]):
-                    yield 'B | ' + self.b[i]
-                yield '>>>>\n'
-            else:
-                raise ValueError(what)
-        
-        
-
-
-
-    def merge_groups(self):
-        """Yield sequence of line groups.  Each one is a tuple:
-
-        'unchanged', lines
-             Lines unchanged from base
-
-        'a', lines
-             Lines taken from a
-
-        'same', lines
-             Lines taken from a (and equal to b)
-
-        'b', lines
-             Lines taken from b
-
-        'conflict', base_lines, a_lines, b_lines
-             Lines from base were changed to either a or b and conflict.
-        """
-        for t in self.merge_regions():
-            what = t[0]
-            if what == 'unchanged':
-                yield what, self.base[t[1]:t[2]]
-            elif what == 'a' or what == 'same':
-                yield what, self.a[t[1]:t[2]]
-            elif what == 'b':
-                yield what, self.b[t[1]:t[2]]
-            elif what == 'conflict':
-                yield (what,
-                       self.base[t[1]:t[2]],
-                       self.a[t[3]:t[4]],
-                       self.b[t[5]:t[6]])
-            else:
-                raise ValueError(what)
-
-
-    def merge_regions(self):
-        """Return sequences of matching and conflicting regions.
-
-        This returns tuples, where the first value says what kind we
-        have:
-
-        'unchanged', start, end
-             Take a region of base[start:end]
-
-        'same', astart, aend
-             b and a are different from base but give the same result
-
-        'a', start, end
-             Non-clashing insertion from a[start:end]
-
-        Method is as follows:
-
-        The two sequences align only on regions which match the base
-        and both descendents.  These are found by doing a two-way diff
-        of each one against the base, and then finding the
-        intersections between those regions.  These "sync regions"
-        are by definition unchanged in both and easily dealt with.
-
-        The regions in between can be in any of three cases:
-        conflicted, or changed on only one side.
-        """
-
-        # section a[0:ia] has been disposed of, etc
-        iz = ia = ib = 0
-        
-        for zmatch, zend, amatch, aend, bmatch, bend in self.find_sync_regions():
-            #print 'match base [%d:%d]' % (zmatch, zend)
-            
-            matchlen = zend - zmatch
-            assert matchlen >= 0
-            assert matchlen == (aend - amatch)
-            assert matchlen == (bend - bmatch)
-            
-            len_a = amatch - ia
-            len_b = bmatch - ib
-            len_base = zmatch - iz
-            assert len_a >= 0
-            assert len_b >= 0
-            assert len_base >= 0
-
-            #print 'unmatched a=%d, b=%d' % (len_a, len_b)
-
-            if len_a or len_b:
-                # try to avoid actually slicing the lists
-                equal_a = compare_range(self.a, ia, amatch,
-                                        self.base, iz, zmatch)
-                equal_b = compare_range(self.b, ib, bmatch,
-                                        self.base, iz, zmatch)
-                same = compare_range(self.a, ia, amatch,
-                                     self.b, ib, bmatch)
-
-                if same:
-                    yield 'same', ia, amatch
-                elif equal_a and not equal_b:
-                    yield 'b', ib, bmatch
-                elif equal_b and not equal_a:
-                    yield 'a', ia, amatch
-                elif not equal_a and not equal_b:
-                    yield 'conflict', iz, zmatch, ia, amatch, ib, bmatch
-                else:
-                    raise AssertionError("can't handle a=b=base but unmatched")
-
-                ia = amatch
-                ib = bmatch
-            iz = zmatch
-
-            # if the same part of the base was deleted on both sides
-            # that's OK, we can just skip it.
-
-                
-            if matchlen > 0:
-                assert ia == amatch
-                assert ib == bmatch
-                assert iz == zmatch
-                
-                yield 'unchanged', zmatch, zend
-                iz = zend
-                ia = aend
-                ib = bend
-    
-
-    def reprocess_merge_regions(self, merge_regions):
-        """Where there are conflict regions, remove the agreed lines.
-
-        Lines where both A and B have made the same changes are 
-        eliminated.
-        """
-        for region in merge_regions:
-            if region[0] != "conflict":
-                yield region
-                continue
-            type, iz, zmatch, ia, amatch, ib, bmatch = region
-            a_region = self.a[ia:amatch]
-            b_region = self.b[ib:bmatch]
-            matches = mdiff.get_matching_blocks(''.join(a_region),
-                                                ''.join(b_region))
-            next_a = ia
-            next_b = ib
-            for region_ia, region_ib, region_len in matches[:-1]:
-                region_ia += ia
-                region_ib += ib
-                reg = self.mismatch_region(next_a, region_ia, next_b,
-                                           region_ib)
-                if reg is not None:
-                    yield reg
-                yield 'same', region_ia, region_len+region_ia
-                next_a = region_ia + region_len
-                next_b = region_ib + region_len
-            reg = self.mismatch_region(next_a, amatch, next_b, bmatch)
-            if reg is not None:
-                yield reg
-
-
-    def mismatch_region(next_a, region_ia,  next_b, region_ib):
-        if next_a < region_ia or next_b < region_ib:
-            return 'conflict', None, None, next_a, region_ia, next_b, region_ib
-    mismatch_region = staticmethod(mismatch_region)
-            
-
-    def find_sync_regions(self):
-        """Return a list of sync regions, where both descendents match the base.
-
-        Generates a list of (base1, base2, a1, a2, b1, b2).  There is
-        always a zero-length sync region at the end of all the files.
-        """
-
-        ia = ib = 0
-        amatches = mdiff.get_matching_blocks(self.basetext, self.atext)
-        bmatches = mdiff.get_matching_blocks(self.basetext, self.btext)
-        len_a = len(amatches)
-        len_b = len(bmatches)
-
-        sl = []
-
-        while ia < len_a and ib < len_b:
-            abase, amatch, alen = amatches[ia]
-            bbase, bmatch, blen = bmatches[ib]
-
-            # there is an unconflicted block at i; how long does it
-            # extend?  until whichever one ends earlier.
-            i = intersect((abase, abase+alen), (bbase, bbase+blen))
-            if i:
-                intbase = i[0]
-                intend = i[1]
-                intlen = intend - intbase
-
-                # found a match of base[i[0], i[1]]; this may be less than
-                # the region that matches in either one
-                assert intlen <= alen
-                assert intlen <= blen
-                assert abase <= intbase
-                assert bbase <= intbase
-
-                asub = amatch + (intbase - abase)
-                bsub = bmatch + (intbase - bbase)
-                aend = asub + intlen
-                bend = bsub + intlen
-
-                assert self.base[intbase:intend] == self.a[asub:aend], \
-                       (self.base[intbase:intend], self.a[asub:aend])
-
-                assert self.base[intbase:intend] == self.b[bsub:bend]
-
-                sl.append((intbase, intend,
-                           asub, aend,
-                           bsub, bend))
-
-            # advance whichever one ends first in the base text
-            if (abase + alen) < (bbase + blen):
-                ia += 1
-            else:
-                ib += 1
-            
-        intbase = len(self.base)
-        abase = len(self.a)
-        bbase = len(self.b)
-        sl.append((intbase, intbase, abase, abase, bbase, bbase))
-
-        return sl
-
-
-
-    def find_unconflicted(self):
-        """Return a list of ranges in base that are not conflicted."""
-        am = mdiff.get_matching_blocks(self.basetext, self.atext)
-        bm = mdiff.get_matching_blocks(self.basetext, self.btext)
-
-        unc = []
-
-        while am and bm:
-            # there is an unconflicted block at i; how long does it
-            # extend?  until whichever one ends earlier.
-            a1 = am[0][0]
-            a2 = a1 + am[0][2]
-            b1 = bm[0][0]
-            b2 = b1 + bm[0][2]
-            i = intersect((a1, a2), (b1, b2))
-            if i:
-                unc.append(i)
-
-            if a2 < b2:
-                del am[0]
-            else:
-                del bm[0]
-                
-        return unc
-
-
-# bzr compatible interface, for the tests
-class Merge3(Merge3Text):
-    """3-way merge of texts.
-
-    Given BASE, OTHER, THIS, tries to produce a combined text
-    incorporating the changes from both BASE->OTHER and BASE->THIS.
-    All three will typically be sequences of lines."""
-    def __init__(self, base, a, b):
-        basetext = '\n'.join([i.strip('\n') for i in base] + [''])
-        atext = '\n'.join([i.strip('\n') for i in a] + [''])
-        btext = '\n'.join([i.strip('\n') for i in b] + [''])
-        if util.binary(basetext) or util.binary(atext) or util.binary(btext):
-            raise util.Abort(_("don't know how to merge binary files"))
-        Merge3Text.__init__(self, basetext, atext, btext, base, a, b)
-
-
-def simplemerge(local, base, other, **opts):
-    def readfile(filename):
-        f = open(filename, "rb")
-        text = f.read()
-        f.close()
-        if util.binary(text):
-            msg = _("%s looks like a binary file.") % filename
-            if not opts.get('text'):
-                raise util.Abort(msg)
-            elif not opts.get('quiet'):
-                warn(_('warning: %s\n') % msg)
-        return text
-
-    name_a = local
-    name_b = other
-    labels = opts.get('label', [])
-    if labels:
-        name_a = labels.pop(0)
-    if labels:
-        name_b = labels.pop(0)
-    if labels:
-        raise util.Abort(_("can only specify two labels."))
-
-    localtext = readfile(local)
-    basetext = readfile(base)
-    othertext = readfile(other)
-
-    orig = local
-    local = os.path.realpath(local)
-    if not opts.get('print'):
-        opener = util.opener(os.path.dirname(local))
-        out = opener(os.path.basename(local), "w", atomictemp=True)
-    else:
-        out = sys.stdout
-
-    reprocess = not opts.get('no_minimal')
-
-    m3 = Merge3Text(basetext, localtext, othertext)
-    for line in m3.merge_lines(name_a=name_a, name_b=name_b,
-                               reprocess=reprocess):
-        out.write(line)
-
-    if not opts.get('print'):
-        out.rename()
-
-    if m3.conflicts:
-        if not opts.get('quiet'):
-            warn(_("warning: conflicts during merge.\n"))
-        return 1
+from mercurial import simplemerge, fancyopts, util
 
 options = [('L', 'label', [], _('labels to use on conflict markers')),
            ('a', 'text', None, _('treat all files as text')),
@@ -511,12 +19,15 @@
 usage = _('''simplemerge [OPTS] LOCAL BASE OTHER
 
     Simple three-way file merge utility with a minimal feature set.
-    
+
     Apply to LOCAL the changes necessary to go from BASE to OTHER.
-    
+
     By default, LOCAL is overwritten with the results of this operation.
 ''')
 
+class ParseError(Exception):
+    """Exception raised on errors in parsing the command line."""
+
 def showhelp():
     sys.stdout.write(usage)
     sys.stdout.write('\noptions:\n')
@@ -530,33 +41,24 @@
     for first, second in out_opts:
         sys.stdout.write(' %-*s  %s\n' % (opts_len, first, second))
 
-class ParseError(Exception):
-    """Exception raised on errors in parsing the command line."""
-
-def main(argv):
+try:
+    opts = {}
     try:
-        opts = {}
-        try:
-            args = fancyopts.fancyopts(argv[1:], options, opts)
-        except fancyopts.getopt.GetoptError, e:
-            raise ParseError(e)
-        if opts['help']:
-            showhelp()
-            return 0
-        if len(args) != 3:
-                raise ParseError(_('wrong number of arguments'))
-        return simplemerge(*args, **opts)
-    except ParseError, e:
-        sys.stdout.write("%s: %s\n" % (sys.argv[0], e))
+        args = fancyopts.fancyopts(sys.argv[1:], options, opts)
+    except fancyopts.getopt.GetoptError, e:
+        raise ParseError(e)
+    if opts['help']:
         showhelp()
-        return 1
-    except util.Abort, e:
-        sys.stderr.write("abort: %s\n" % e)
-        return 255
-    except KeyboardInterrupt:
-        return 255
-
-if __name__ == '__main__':
-    import sys
-    import os
-    sys.exit(main(sys.argv))
+        sys.exit(0)
+    if len(args) != 3:
+            raise ParseError(_('wrong number of arguments'))
+    sys.exit(simplemerge.simplemerge(*args, **opts))
+except ParseError, e:
+    sys.stdout.write("%s: %s\n" % (sys.argv[0], e))
+    showhelp()
+    sys.exit(1)
+except util.Abort, e:
+    sys.stderr.write("abort: %s\n" % e)
+    sys.exit(255)
+except KeyboardInterrupt:
+    sys.exit(255)
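The merge machinery deleted above now lives in mercurial/simplemerge, and this script is reduced to a thin command-line wrapper around it. Calling the module directly looks roughly like this (a sketch assuming the module-level simplemerge() keeps the (local, base, other, **opts) signature and return convention of the function removed above; the file names are hypothetical):

    from mercurial import demandimport; demandimport.enable()
    from mercurial import simplemerge

    # equivalent of: simplemerge --print local.txt base.txt other.txt
    # returns 1 if conflict markers were emitted, falsy otherwise
    ret = simplemerge.simplemerge('local.txt', 'base.txt', 'other.txt',
                                  **{'print': True})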
--- a/contrib/vim/hg-menu.vim	Thu Jul 26 07:56:27 2007 -0400
+++ b/contrib/vim/hg-menu.vim	Wed Feb 06 19:57:52 2008 -0800
@@ -9,7 +9,7 @@
 "         Usage: These command and gui menu displays useful hg functions
 " Configuration: Your hg executable must be in your path.
 " =============================================================================
- 
+
 " Section: Init {{{1
 if exists("loaded_hg_menu")
   finish
--- a/contrib/win32/ReadMe.html	Thu Jul 26 07:56:27 2007 -0400
+++ b/contrib/win32/ReadMe.html	Wed Feb 06 19:57:52 2008 -0800
@@ -2,7 +2,7 @@
 <html>
   <head>
     <title>Mercurial for Windows</title>
-    <meta http-equiv="Content-Type" content="text/html;charset=utf-8" >  
+    <meta http-equiv="Content-Type" content="text/html;charset=utf-8" >
     <style type="text/css">
       <!--
       .indented
@@ -21,7 +21,7 @@
     <p>Mercurial is a command-line application.  You must run it from
       the Windows command prompt (or if you're hard core, a <a
       href="http://www.mingw.org/">MinGW</a> shell).</p>
-    
+
     <p><div class="indented"><i>Note: the standard <a
       href="http://www.mingw.org/">MinGW</a> msys startup script uses
       rxvt which has problems setting up standard input and output.
@@ -33,7 +33,7 @@
       href="http://hgbook.red-bean.com/">Distributed revision control
       with Mercurial</a>.</p>
 
-    <p>By default, Mercurial installs to <tt>C:\Mercurial</tt>.  The
+    <p>By default, Mercurial installs to <tt>C:\Program Files\Mercurial</tt>.  The
       Mercurial command is called <tt>hg.exe</tt>.</p>
 
     <h1>Testing Mercurial after you've installed it</h1>
@@ -59,10 +59,10 @@
 </pre>
 
 	<h4>Configuring a Merge program</h4>
-	It should be emphasized that Mercurial by itself doesn't attempt to do a 
+	It should be emphasized that Mercurial by itself doesn't attempt to do a
 	Merge at the file level, neither does it make any attempt to Resolve the conflicts.
 
-    By default, Mercurial will use the merge program defined by the HGMERGE environment 
+    By default, Mercurial will use the merge program defined by the HGMERGE environment
     variable, or uses the one defined in the mercurial.ini file. (see <a href="http://www.selenic.com/mercurial/wiki/index.cgi/MergeProgram">MergeProgram</a> on the Mercurial Wiki for more information)
 
     <h1>Reporting problems</h1>
--- a/contrib/win32/mercurial.ini	Thu Jul 26 07:56:27 2007 -0400
+++ b/contrib/win32/mercurial.ini	Wed Feb 06 19:57:52 2008 -0800
@@ -3,7 +3,7 @@
 ; USERNAME is your Windows user name:
 ;   C:\Documents and Settings\USERNAME\Mercurial.ini
 
-[ui] 
+[ui]
 editor = notepad
 
 ; By default, we try to encode and decode all files that do not
--- a/contrib/win32/mercurial.iss	Thu Jul 26 07:56:27 2007 -0400
+++ b/contrib/win32/mercurial.iss	Wed Feb 06 19:57:52 2008 -0800
@@ -15,8 +15,8 @@
 AppID={{4B95A5F1-EF59-4B08-BED8-C891C46121B3}
 AppContact=mercurial@selenic.com
 OutputBaseFilename=Mercurial-snapshot
-DefaultDirName={sd}\Mercurial
-SourceDir=C:\hg\hg-release
+DefaultDirName={pf}\Mercurial
+SourceDir=..\..
 VersionInfoDescription=Mercurial distributed SCM
 VersionInfoCopyright=Copyright 2005-2007 Matt Mackall and others
 VersionInfoCompany=Matt Mackall and others
@@ -29,17 +29,17 @@
 
 [Files]
 Source: contrib\mercurial.el; DestDir: {app}/Contrib
+Source: contrib\vim\*.*; DestDir: {app}/Contrib/Vim
+Source: contrib\zsh_completion; DestDir: {app}/Contrib
 Source: contrib\win32\ReadMe.html; DestDir: {app}; Flags: isreadme
 Source: contrib\win32\mercurial.ini; DestDir: {app}; DestName: Mercurial.ini; Flags: confirmoverwrite
 Source: contrib\win32\postinstall.txt; DestDir: {app}; DestName: ReleaseNotes.txt
 Source: dist\hg.exe; DestDir: {app}; AfterInstall: Touch('{app}\hg.exe.local')
 Source: dist\library.zip; DestDir: {app}
-Source: dist\patch.exe; DestDir: {app}
 Source: dist\mfc71.dll; DestDir: {app}
 Source: dist\msvcr71.dll; DestDir: {app}
 Source: dist\w9xpopen.exe; DestDir: {app}
 Source: dist\add_path.exe; DestDir: {app}
-Source: doc\*.txt; DestDir: {app}\Docs
 Source: doc\*.html; DestDir: {app}\Docs
 Source: templates\*.*; DestDir: {app}\Templates; Flags: recursesubdirs createallsubdirs
 Source: CONTRIBUTORS; DestDir: {app}; DestName: Contributors.txt
--- a/contrib/win32/win32-build.txt	Thu Jul 26 07:56:27 2007 -0400
+++ b/contrib/win32/win32-build.txt	Wed Feb 06 19:57:52 2008 -0800
@@ -35,7 +35,7 @@
 
 In a shell, build a standalone copy of the hg.exe program:
 
-  python setup.py build -c mingw32 
+  python setup.py build -c mingw32
   python setup.py py2exe -b 1
 
 Note: the previously suggested combined command of "python setup.py build -c
--- a/contrib/zsh_completion	Thu Jul 26 07:56:27 2007 -0400
+++ b/contrib/zsh_completion	Wed Feb 06 19:57:52 2008 -0800
@@ -13,6 +13,9 @@
 # option) any later version.
 #
 
+emulate -LR zsh
+setopt extendedglob
+
 local curcontext="$curcontext" state line
 typeset -A _hg_cmd_globals
 
@@ -153,9 +156,9 @@
   typeset -a tags
   local tag rev
 
-  _hg_cmd tags 2> /dev/null | while read tag rev
+  _hg_cmd tags 2> /dev/null | while read tag
   do
-    tags+=($tag)
+    tags+=(${tag/ #    [0-9]#:*})
   done
   (( $#tags )) && _describe -t tags 'tags' tags
 }
@@ -200,6 +203,13 @@
   _wanted files expl 'modified files' _multi_parts / status_files
 }
 
+_hg_config() {
+    typeset -a items
+    local line
+    items=(${${(%f)"$(_hg_cmd showconfig)"}%%\=*})
+    (( $#items )) && _describe -t config 'config item' items
+}
+
 _hg_addremove() {
   _alternative 'files:unknown files:_hg_unknown' \
     'files:missing files:_hg_missing'
@@ -352,6 +362,17 @@
   '*:destination:_files'
 }
 
+_hg_cmd_backout() {
+  _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
+    '--merge[merge with old dirstate parent after backout]' \
+    '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \
+    '--parent[parent to choose when backing out merge]' \
+    '(--user -u)'{-u+,--user}'[record user as commiter]:user:' \
+    '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_tags' \
+    '(--message -m)'{-m+,--message}'[use <text> as commit message]:text:' \
+    '(--logfile -l)'{-l+,--logfile}'[read commit message from <file>]:log file:_files -g \*.txt'
+}
+
 _hg_cmd_bundle() {
   _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
   '(--force -f)'{-f,--force}'[run even when remote repository is unrelated]' \
@@ -431,7 +452,8 @@
   '(--line-number -n)'{-n,--line-number}'[print matching line numbers]' \
   '*'{-r+,--rev}'[search in given revision range]:revision:_hg_revrange' \
   '(--user -u)'{-u,--user}'[print user who committed change]' \
-  '*:search pattern:_hg_files'
+  '1:search pattern:' \
+  '*:files:_hg_files'
 }
 
 _hg_cmd_heads() {
@@ -444,6 +466,15 @@
   '*:mercurial command:_hg_commands'
 }
 
+_hg_cmd_identify() {
+  _arguments -s -w : $_hg_global_opts \
+  '(--rev -r)'{-r+,--rev}'[identify the specified rev]:revision:_hg_tags' \
+  '(--num -n)'{-n+,--num}'[show local revision number]' \
+  '(--id -i)'{-i+,--id}'[show global revision id]' \
+  '(--branch -b)'{-b+,--branch}'[show branch]' \
+  '(--tags -t)'{-t+,--tags}'[show tags]'
+}
+
 _hg_cmd_import() {
   _arguments -s -w : $_hg_global_opts \
   '(--strip -p)'{-p+,--strip}'[directory strip option for patch (default: 1)]:count:' \
@@ -457,7 +488,7 @@
   '(--no-merges -M)'{-M,--no-merges}'[do not show merge revisions]' \
   '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \
   '(--patch -p)'{-p,--patch}'[show patch]' \
-  '(--rev -r)'{-r+,--rev}'[a specific revision up to which you would like to pull]' \
+  '(--rev -r)'{-r+,--rev}'[a specific revision up to which you would like to pull]:revision:_hg_tags' \
   '(--newest-first -n)'{-n,--newest-first}'[show newest record first]' \
   '--bundle[file to store the bundles into]:bundle file:_files' \
   ':source:_hg_remote'
@@ -509,7 +540,7 @@
 _hg_cmd_parents() {
   _arguments -s -w : $_hg_global_opts $_hg_style_opts \
   '(--rev -r)'{-r+,--rev}'[show parents of the specified rev]:revision:_hg_tags' \
-  ':revision:_hg_tags'
+  ':last modified file:_hg_files'
 }
 
 _hg_cmd_paths() {
@@ -521,13 +552,14 @@
   _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
   '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \
   '(--update -u)'{-u,--update}'[update to new tip if changesets were pulled]' \
+  '(--rev -r)'{-r+,--rev}'[a specific revision up to which you would like to pull]:revision:' \
   ':source:_hg_remote'
 }
 
 _hg_cmd_push() {
   _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
   '(--force -f)'{-f,--force}'[force push]' \
-  '(--rev -r)'{-r+,--rev}'[a specific revision you would like to push]' \
+  '(--rev -r)'{-r+,--rev}'[a specific revision you would like to push]:revision:_hg_tags' \
   ':destination:_hg_remote'
 }
 
@@ -579,6 +611,12 @@
   '(--ipv6 -6)'{-6,--ipv6}'[use IPv6 in addition to IPv4]'
 }
 
+_hg_cmd_showconfig() {
+  _arguments -s -w : $_hg_global_opts \
+  '(--untrusted -u)'{-u+,--untrusted}'[show untrusted configuration options]' \
+  ':config item:_hg_config'
+}
+
 _hg_cmd_status() {
   _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
   '(--all -A)'{-A,--all}'[show status of all files]' \
@@ -620,9 +658,15 @@
 _hg_cmd_update() {
   _arguments -s -w : $_hg_global_opts \
   '(--clean -C)'{-C,--clean}'[overwrite locally modified files]' \
+  '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_tags' \
   ':revision:_hg_tags'
 }
 
+# bisect extension
+_hg_cmd_bisect() {
+  _arguments -s -w : $_hg_global_opts ':evaluation:(help init reset next good bad)'
+}
+
 # HGK
 _hg_cmd_view() {
   _arguments -s -w : $_hg_global_opts \
@@ -633,13 +677,13 @@
 # MQ
 _hg_qseries() {
   typeset -a patches
-  patches=($(_hg_cmd qseries 2>/dev/null))
+  patches=(${(f)"$(_hg_cmd qseries 2>/dev/null)"})
   (( $#patches )) && _describe -t hg-patches 'patches' patches
 }
 
 _hg_qapplied() {
   typeset -a patches
-  patches=($(_hg_cmd qapplied 2>/dev/null))
+  patches=(${(f)"$(_hg_cmd qapplied 2>/dev/null)"})
   if (( $#patches ))
   then
     patches+=(qbase qtip)
@@ -649,7 +693,7 @@
 
 _hg_qunapplied() {
   typeset -a patches
-  patches=($(_hg_cmd qunapplied 2>/dev/null))
+  patches=(${(f)"$(_hg_cmd qunapplied 2>/dev/null)"})
   (( $#patches )) && _describe -t hg-unapplied-patches 'unapplied patches' patches
 }
 
@@ -689,6 +733,12 @@
   '*:unapplied patch:_hg_qunapplied'
 }
 
+_hg_cmd_qgoto() {
+  _arguments -s -w : $_hg_global_opts \
+  '(--force -f)'{-f,--force}'[overwrite any local changes]' \
+  ':patch:_hg_qseries'
+}
+
 _hg_cmd_qguard() {
   _arguments -s -w : $_hg_global_opts \
   '(--list -l)'{-l,--list}'[list all patches and guards]' \
--- a/doc/README	Thu Jul 26 07:56:27 2007 -0400
+++ b/doc/README	Wed Feb 06 19:57:52 2008 -0800
@@ -18,4 +18,6 @@
 
 To create the html page (without stylesheets):
 
- asciidoc -b html hg.1.txt
+ asciidoc -b html4 hg.1.txt
+
+(older asciidoc may want html instead of html4 above)
--- a/doc/gendoc.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/doc/gendoc.py	Wed Feb 06 19:57:52 2008 -0800
@@ -1,6 +1,7 @@
 import sys, textwrap
 # import from the live mercurial repo
 sys.path.insert(0, "..")
+from mercurial import demandimport; demandimport.enable()
 from mercurial.commands import table, globalopts
 from mercurial.i18n import gettext as _
 from mercurial.help import helptable
--- a/doc/hg.1.txt	Thu Jul 26 07:56:27 2007 -0400
+++ b/doc/hg.1.txt	Wed Feb 06 19:57:52 2008 -0800
@@ -62,6 +62,14 @@
     The reserved name "tip" is a special tag that always identifies
     the most recent revision.
 
+    The reserved name "null" indicates the null revision. This is the
+    revision of an empty repository, and the parent of revision 0.
+
+    The reserved name "." indicates the working directory parent. If
+    no working directory is checked out, it is equivalent to null.
+    If an uncommitted merge is in progress, "." is the revision of
+    the first parent.
+
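    For illustration (not part of the manual page itself), these reserved
    names can be used wherever a revision is accepted:

        hg update null       # empty the working directory
        hg log -r .          # show the working directory's (first) parent
        hg diff -r . -r tip  # changes between the working parent and tip
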
 SPECIFYING MULTIPLE REVISIONS
 -----------------------------
 
@@ -83,11 +91,11 @@
 
 FILES
 -----
- .hgignore::
+ repo/.hgignore::
     This file contains regular expressions (one per line) that describe file
     names that should be ignored by hg. For details, see hgignore(5).
 
- .hgtags::
+ repo/.hgtags::
     This file contains changeset hash values and text tag names (one of each
     separated by spaces) that correspond to tagged versions of the repository
     contents.
--- a/doc/hgmerge.1.txt	Thu Jul 26 07:56:27 2007 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,35 +0,0 @@
-HGMERGE(1)
-==========
-Matt Mackall <mpm@selenic.com>
-v0.1, 27 May 2005
-
-NAME
-----
-hgmerge - default wrapper to merge files in Mercurial SCM system
-
-SYNOPSIS
---------
-'hgmerge' local ancestor remote
-
-DESCRIPTION
------------
-The hgmerge(1) command provides a graphical interface to merge files in the
-Mercurial system. It is a simple wrapper around kdiff3, merge(1) and tkdiff(1),
-or simply diff(1) and patch(1) depending on what is present on the system.
-
-hgmerge(1) is used by the Mercurial SCM if the environment variable HGMERGE is
-not set.
-
-AUTHOR
-------
-Written by Vincent Danjean <Vincent.Danjean@free.fr>
-
-SEE ALSO
---------
-hg(1) - the command line interface to Mercurial SCM
-
-COPYING
--------
-Copyright \(C) 2005-2007 Matt Mackall.
-Free use of this software is granted under the terms of the GNU General
-Public License (GPL).
--- a/doc/hgrc.5.txt	Thu Jul 26 07:56:27 2007 -0400
+++ b/doc/hgrc.5.txt	Wed Feb 06 19:57:52 2008 -0800
@@ -17,7 +17,9 @@
 
 Mercurial reads configuration data from several files, if they exist.
 The names of these files depend on the system on which Mercurial is
-installed.
+installed. Windows registry keys contain PATH-like strings; every
+part must reference a Mercurial.ini file or be a directory where *.rc
+files will be read.
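As a purely hypothetical example, such a registry value could look like:

    C:\Program Files\Mercurial\Mercurial.ini;C:\Program Files\Mercurial\hgrc.d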
 
 (Unix)    <install-root>/etc/mercurial/hgrc.d/*.rc::
 (Unix)    <install-root>/etc/mercurial/hgrc::
@@ -29,6 +31,8 @@
 
 (Unix)    /etc/mercurial/hgrc.d/*.rc::
 (Unix)    /etc/mercurial/hgrc::
+(Windows) HKEY_LOCAL_MACHINE\SOFTWARE\Mercurial::
+  or::
 (Windows) C:\Mercurial\Mercurial.ini::
     Per-system configuration files, for the system on which Mercurial
     is running.  Options in these files apply to all Mercurial
@@ -120,34 +124,39 @@
 
   NOTE: the tempfile mechanism is recommended for Windows systems,
   where the standard shell I/O redirection operators often have
-  strange effects.  In particular, if you are doing line ending
-  conversion on Windows using the popular dos2unix and unix2dos
-  programs, you *must* use the tempfile mechanism, as using pipes will
-  corrupt the contents of your files.
+  strange effects and may corrupt the contents of your files.
 
-  Tempfile example:
+  The most common usage is for LF <-> CRLF translation on Windows.
+  For this, use the "smart" converters which check for binary files:
 
+    [extensions]
+    hgext.win32text =
     [encode]
-    # convert files to unix line ending conventions on checkin
-    **.txt = tempfile: dos2unix -n INFILE OUTFILE
-
+    ** = cleverencode:
     [decode]
-    # convert files to windows line ending conventions when writing
-    # them to the working dir
-    **.txt = tempfile: unix2dos -n INFILE OUTFILE
+    ** = cleverdecode:
+
+  or if you only want to translate certain files:
+
+    [extensions]
+    hgext.win32text =
+    [encode]
+    **.txt = dumbencode:
+    [decode]
+    **.txt = dumbdecode:
 
 defaults::
-  Use the [defaults] section to define command defaults, i.e. the 
+  Use the [defaults] section to define command defaults, i.e. the
   default options/arguments to pass to the specified commands.
-  
+
   The following example makes 'hg log' run in verbose mode, and
   'hg status' show only the modified files, by default.
-  
+
     [defaults]
     log = -v
     status = -m
-  
-  The actual commands, instead of their aliases, must be used when 
+
+  The actual commands, instead of their aliases, must be used when
   defining command defaults. The command defaults will also be
   applied to the aliases of the commands defined.
 
@@ -224,6 +233,78 @@
     you to store longer filenames in some situations at the expense of
     compatibility.
 
+merge-patterns::
+  This section specifies merge tools to associate with particular file
+  patterns. Tools matched here will take precedence over the default
+  merge tool. Patterns are globs by default, rooted at the repository root.
+
+  Example:
+
+    [merge-patterns]
+    **.c = kdiff3
+    **.jpg = myimgmerge
+
+merge-tools::
+  This section configures external merge tools to use for file-level
+  merges.
+
+  Example ~/.hgrc:
+
+    [merge-tools]
+    # Override stock tool location
+    kdiff3.executable = ~/bin/kdiff3
+    # Specify command line
+    kdiff3.args = $base $local $other -o $output
+    # Give higher priority
+    kdiff3.priority = 1
+
+    # Define new tool
+    myHtmlTool.args = -m $local $other $base $output
+    myHtmlTool.regkey = Software\FooSoftware\HtmlMerge
+    myHtmlTool.priority = 1
+
+  Supported arguments:
+  priority;;
+    The priority in which to evaluate this tool.
+    Default: 0.
+  executable;;
+    Either just the name of the executable or its pathname.
+    Default: the tool name.
+  args;;
+    The arguments to pass to the tool executable. You can refer to the files
+    being merged as well as the output file through these variables: $base,
+    $local, $other, $output.
+    Default: $local $base $other
+  premerge;;
+    Attempt to run internal non-interactive 3-way merge tool before
+    launching external tool.
+    Default: True
+  binary;;
+    This tool can merge binary files.  Defaults to False, unless tool
+    was selected by file pattern match.
+  symlink;;
+    This tool can merge symlinks.  Defaults to False, even if tool was
+    selected by file pattern match.
+  checkconflicts;;
+    Check whether there are conflicts even though the tool reported
+    success.
+    Default: False
+  fixeol;;
+    Attempt to fix up EOL changes caused by the merge tool.
+    Default: False
+  gui;;
+    This tool requires a graphical interface to run. Default: False
+  regkey;;
+    Windows registry key which describes install location of this tool.
+    Mercurial will search for this key first under HKEY_CURRENT_USER and
+    then under HKEY_LOCAL_MACHINE.  Default: None
+  regname;;
+    Name of value to read from specified registry key.  Defaults to the
+    unnamed (default) value.
+  regappend;;
+    String to append to the value read from the registry, typically the
+    executable name of the tool.  Default: None
+
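  To tie the arguments together, a hypothetical tool entry for binary image
  merges might look like this (tool name and path are invented; only the
  documented keys are used):

    [merge-patterns]
    **.png = imgmerge

    [merge-tools]
    imgmerge.executable = C:\Tools\imgmerge.exe
    imgmerge.args = $local $other $base -o $output
    imgmerge.binary = True
    imgmerge.gui = True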
 hooks::
   Commands or Python functions that get automatically executed by
   various actions such as starting or finishing a commit. Multiple
@@ -277,7 +358,7 @@
     commit to proceed.  Non-zero status will cause the commit to fail.
     Parent changeset IDs are in $HG_PARENT1 and $HG_PARENT2.
   preoutgoing;;
-    Run before computing changes to send from the local repository to
+    Run before collecting changes to send from the local repository to
     another.  Non-zero status will cause failure.  This lets you
     prevent pull over http or ssh.  Also prevents against local pull,
     push (outbound) or bundle commands, but not effective, since you
@@ -394,6 +475,20 @@
     Optional.  Directory or URL to use when pushing if no destination
     is specified.
 
+profile::
+  Configuration of profiling options, for in-depth performance
+  analysis.  Mostly useful to developers.
+  enable;;
+    Enable a particular profiling mode.  Useful for profiling
+    server-side processes.  "lsprof" enables modern profiling.
+    "hotshot" is deprecated, and produces less reliable results.
+    Default is no profiling.
+  output;;
+    The name of a file to write profiling data to.  Each occurrence of
+    "%%p" will be replaced with the current process ID (the repeated
+    "%" protects against the config parser's string interpolator).
+    Default output is to stderr.
+  
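  A minimal sketch of such a configuration (the output path is just an
  example):

    [profile]
    enable = lsprof
    output = /tmp/hgprof.%%p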
 server::
   Controls generic server settings.
   uncompressed;;
@@ -444,7 +539,18 @@
     Template string for commands that print changesets.
   merge;;
     The conflict resolution program to use during a manual merge.
-    Default is "hgmerge".
+    There are some internal tools available:
+
+      internal:local;;
+        keep the local version
+      internal:other;;
+        use the other version
+      internal:merge;;
+        use the internal non-interactive merge tool
+      internal:fail;;
+        fail to merge
+
+    See the merge-tools section for more information on configuring tools.
   patch;;
     command to use to apply patches. Look for 'gpatch' or 'patch' in PATH if
     unset.
@@ -515,7 +621,7 @@
     Example: "http://hgserver/repos/"
   contact;;
     Name or email address of the person in charge of the repository.
-    Default is "unknown".
+    Defaults to ui.username or $EMAIL or "unknown" if unset or empty.
   deny_push;;
     Whether to deny pushing to the repository.  If empty or not set,
     push is not denied.  If the special value "*", all remote users
@@ -544,6 +650,8 @@
     Maximum number of files to list per changeset. Default is 10.
   port;;
     Port to listen on. Default is 8000.
+  prefix;;
+    Prefix path to serve from. Default is '' (server root).
   push_ssl;;
     Whether to require that inbound pushes be transported over SSL to
     prevent password sniffing.  Default is true.
--- a/doc/ja/docbook.ja.conf	Thu Jul 26 07:56:27 2007 -0400
+++ b/doc/ja/docbook.ja.conf	Wed Feb 06 19:57:52 2008 -0800
@@ -481,7 +481,7 @@
 </book>
 
 [preamble]
-# Preamble is not allowed in DocBook book so wrap it in a preface. 
+# Preamble is not allowed in DocBook book so wrap it in a preface.
 <preface{id? id="{id}"}>
 <title>Preface</title>
 |
--- a/doc/ja/hg.1.ja.txt	Thu Jul 26 07:56:27 2007 -0400
+++ b/doc/ja/hg.1.ja.txt	Wed Feb 06 19:57:52 2008 -0800
@@ -219,7 +219,7 @@
     -f, --force           既存の変更されたファイルに無理矢理コピー
                           します
     -p, --parents         コピー先にコピー元のパスを追加します
-    
+
     別名: cp
 
 diff [-a] [-r revision] [-r revision] [files ...]::
@@ -388,7 +388,7 @@
 
     オプション:
 
-    -0, --print0         xargs と一緒に使うために、ファイル名を 
+    -0, --print0         xargs と一緒に使うために、ファイル名を
                          NUL で終えます
     -f, --fullpath       ファイルシステムのルートからの完全なパ
                          スを表示します
@@ -580,9 +580,9 @@
     -E, --errorlog <file>    エラーログが出力されるファイルの名前を
                              指定します
     -a, --address <addr>     使用するアドレスを指定します
-    -p, --port <n>           使用するポートを指定します 
+    -p, --port <n>           使用するポートを指定します
                              (デフォルト: 8000)
-    -n, --name <name>        ウェブページで表示する名前を指定します 
+    -n, --name <name>        ウェブページで表示する名前を指定します
                              (デフォルト: working dir)
     -t, --templatedir <path> 使用するウェブの雛型を指定します
     -6, --ipv6               IPv4 に加えて IPv6 も使用します
--- a/doc/ja/hgmerge.1.ja.txt	Thu Jul 26 07:56:27 2007 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,37 +0,0 @@
-HGMERGE(1)
-==========
-Matt Mackall <mpm@selenic.com>
-v0.1, 27 May 2005
-
-名前
---
-hgmerge - Mercurial ソースコード管理システムでファイルをマージする
-のに使われるデフォルトのラッパー
-
-書式
---
-'hgmerge' local ancestor remote
-
-説明
---
-hgmerge(1) コマンドは Mercurial システムでファイルをマージするため
-のグラフィカルなインターフェイスを提供します。これは kdiff3,
-merge(1), tkdiff(1), または単純に diff(1) と patch(1) のラッパーで、
-どれがシステム上にあるかに依存します。
-
-hgmerge(1) は Mercurial ソースコード管理システムで環境変数
-HGMERGE が設定されていない場合に使われます。
-
-著者
---
-Vincent Danjean <Vincent.Danjean@free.fr> によって書かれました。
-
-関連情報
---
-hg(1) - Mercurial システムへのコマンドラインインターフェイス
-
-著作権情報
-----
-Copyright (C) 2005-2007 Matt Mackall.
-このソフトウェアの自由な使用は GNU 一般公有使用許諾 (GPL) のもとで
-認められます。
--- a/doc/ja/hgrc.5.ja.txt	Thu Jul 26 07:56:27 2007 -0400
+++ b/doc/ja/hgrc.5.ja.txt	Wed Feb 06 19:57:52 2008 -0800
@@ -61,7 +61,7 @@
 クションのそれぞれの目的や可能なキー、そして取り得る値について記述
 します。
 
-decode/encode:: 
+decode/encode::
   checkout/checkin でファイルを転送するときのフィルターです。これ
   は典型的には改行を処理したり、他の地域化/標準化に使われるでしょ
   う。
@@ -98,7 +98,7 @@
   HTTP プロキシを通してウェブを使った Mercurial のリポジトリにアク
   セスするのに使われます。
   host;;
-    プロキシサーバのホスト名と(オプションの)ポートで、例えば 
+    プロキシサーバのホスト名と(オプションの)ポートで、例えば
     "myproxy:8000"などです。
   no;;
     オプションです。コンマで区切られたプロキシを通過すべきホスト名
--- a/hg	Thu Jul 26 07:56:27 2007 -0400
+++ b/hg	Wed Feb 06 19:57:52 2008 -0800
@@ -7,5 +7,14 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-import mercurial.commands
-mercurial.commands.run()
+# enable importing on demand to reduce startup time
+from mercurial import demandimport; demandimport.enable()
+
+import sys
+import mercurial.util
+import mercurial.dispatch
+
+for fp in (sys.stdin, sys.stdout, sys.stderr):
+    mercurial.util.set_binary(fp)
+
+mercurial.dispatch.run()
--- a/hgext/alias.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/alias.py	Wed Feb 06 19:57:52 2008 -0800
@@ -42,7 +42,7 @@
             return
 
         try:
-            self._cmd = findcmd(self._ui, self._target)[1]
+            self._cmd = findcmd(self._ui, self._target, commands.table)[1]
             if self._cmd == self:
                 raise RecursiveCommand()
             if self._target in commands.norepo.split(' '):
--- a/hgext/bugzilla.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/bugzilla.py	Wed Feb 06 19:57:52 2008 -0800
@@ -282,7 +282,7 @@
                root=self.repo.root,
                webroot=webroot(self.repo.root))
         data = self.ui.popbuffer()
-        self.add_comment(bugid, data, templater.email(ctx.user()))
+        self.add_comment(bugid, data, util.email(ctx.user()))
 
 def hook(ui, repo, hooktype, node=None, **kwargs):
     '''add comment to bugzilla for each changeset that refers to a
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/color.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,219 @@
+# color.py color output for the status and qseries commands
+#
+# Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+'''add color output to the status and qseries commands
+
+This extension modifies the status command to add color to its output to
+reflect file status, and the qseries command to add color to reflect patch
+status (applied, unapplied, missing).  Other effects in addition to color,
+like bold and underlined text, are also available.  Effects are rendered
+with the ECMA-48 SGR control function (aka ANSI escape codes).  This module
+also provides the render_effects function, which can be used to add effects to
+any text.
+
+To enable this extension, add this to your .hgrc file:
+[extensions]
+color =
+
+Default effects may be overridden from the .hgrc file:
+
+[color]
+status.modified = blue bold underline red_background
+status.added = green bold
+status.removed = red bold blue_background
+status.deleted = cyan bold underline
+status.unknown = magenta bold underline
+status.ignored = black bold
+
+ 'none' turns off all effects
+status.clean = none
+status.copied = none
+
+qseries.applied = blue bold underline
+qseries.unapplied = black bold
+qseries.missing = red bold
+'''
+
+import re, sys
+
+from mercurial import commands, cmdutil, ui
+from mercurial.i18n import _
+
+# start and stop parameters for effects
+_effect_params = { 'none': (0, 0),
+                   'black': (30, 39),
+                   'red': (31, 39),
+                   'green': (32, 39),
+                   'yellow': (33, 39),
+                   'blue': (34, 39),
+                   'magenta': (35, 39),
+                   'cyan': (36, 39),
+                   'white': (37, 39),
+                   'bold': (1, 22),
+                   'italic': (3, 23),
+                   'underline': (4, 24),
+                   'inverse': (7, 27),
+                   'black_background': (40, 49),
+                   'red_background': (41, 49),
+                   'green_background': (42, 49),
+                   'yellow_background': (43, 49),
+                   'blue_background': (44, 49),
+                   'purple_background': (45, 49),
+                   'cyan_background': (46, 49),
+                   'white_background': (47, 49), }
+
+def render_effects(text, *effects):
+    'Wrap text in commands to turn on each effect.'
+    start = []
+    stop = []
+    for effect in effects:
+        start.append(str(_effect_params[effect][0]))
+        stop.append(str(_effect_params[effect][1]))
+    start = '\033[' + ';'.join(start) + 'm'
+    stop = '\033[' + ';'.join(stop) + 'm'
+    return start + text + stop
+
+def colorstatus(statusfunc, ui, repo, *pats, **opts):
+    '''run the status command with colored output'''
+
+    delimiter = opts['print0'] and '\0' or '\n'
+
+    # run status and capture its output
+    ui.pushbuffer()
+    retval = statusfunc(ui, repo, *pats, **opts)
+    # filter out empty strings
+    lines = [ line for line in ui.popbuffer().split(delimiter) if line ]
+
+    if opts['no_status']:
+        # if --no-status, run the command again without that option to get
+        # output with status abbreviations
+        opts['no_status'] = False
+        ui.pushbuffer()
+        statusfunc(ui, repo, *pats, **opts)
+        # filter out empty strings
+        lines_with_status = [ line for
+                              line in ui.popbuffer().split(delimiter) if line ]
+    else:
+        lines_with_status = lines
+
+    # apply color to output and display it
+    for i in xrange(0, len(lines)):
+        status = _status_abbreviations[lines_with_status[i][0]]
+        effects = _status_effects[status]
+        if effects:
+            lines[i] = render_effects(lines[i], *effects)
+        sys.stdout.write(lines[i] + delimiter)
+    return retval
+
+_status_abbreviations = { 'M': 'modified',
+                          'A': 'added',
+                          'R': 'removed',
+                          '!': 'deleted',
+                          '?': 'unknown',
+                          'I': 'ignored',
+                          'C': 'clean',
+                          ' ': 'copied', }
+
+_status_effects = { 'modified': ('blue', 'bold'),
+                    'added': ('green', 'bold'),
+                    'removed': ('red', 'bold'),
+                    'deleted': ('cyan', 'bold', 'underline'),
+                    'unknown': ('magenta', 'bold', 'underline'),
+                    'ignored': ('black', 'bold'),
+                    'clean': ('none', ),
+                    'copied': ('none', ), }
+
+def colorqseries(qseriesfunc, ui, repo, *dummy, **opts):
+    '''run the qseries command with colored output'''
+    ui.pushbuffer()
+    retval = qseriesfunc(ui, repo, **opts)
+    patches = ui.popbuffer().splitlines()
+    for patch in patches:
+        if opts['missing']:
+            effects = _patch_effects['missing']
+        # Determine if patch is applied.  Search for beginning of output
+        # line in the applied patch list, in case --summary has been used
+        # and output line isn't just the patch name.
+        elif [ applied for applied in repo.mq.applied
+               if patch.startswith(applied.name) ]:
+            effects = _patch_effects['applied']
+        else:
+            effects = _patch_effects['unapplied']
+        sys.stdout.write(render_effects(patch, *effects) + '\n')
+    return retval
+
+_patch_effects = { 'applied': ('blue', 'bold', 'underline'),
+                   'missing': ('red', 'bold'),
+                   'unapplied': ('black', 'bold'), }
+
+def uisetup(ui):
+    '''Initialize the extension.'''
+    nocoloropt = ('', 'no-color', None, _("don't colorize output"))
+    _decoratecmd(ui, 'status', commands.table, colorstatus, nocoloropt)
+    _configcmdeffects(ui, 'status', _status_effects);
+    if ui.config('extensions', 'hgext.mq', default=None) is not None:
+        from hgext import mq
+        _decoratecmd(ui, 'qseries', mq.cmdtable, colorqseries, nocoloropt)
+        _configcmdeffects(ui, 'qseries', _patch_effects);
+
+def _decoratecmd(ui, cmd, table, delegate, *delegateoptions):
+    '''Replace the function that implements cmd in table with a decorator.
+
+    The decorator that becomes the new implementation of cmd calls
+    delegate.  The delegate's first argument is the replaced function,
+    followed by the normal Mercurial command arguments (ui, repo, ...).  If
+    the delegate adds command options, supply them as delegateoptions.
+    '''
+    cmdkey, cmdentry = _cmdtableitem(ui, cmd, table)
+    decorator = lambda ui, repo, *args, **opts: \
+                    _colordecorator(delegate, cmdentry[0],
+                                    ui, repo, *args, **opts)
+    # make sure 'hg help cmd' still works
+    decorator.__doc__ = cmdentry[0].__doc__
+    decoratorentry = (decorator,) + cmdentry[1:]
+    for option in delegateoptions:
+        decoratorentry[1].append(option)
+    table[cmdkey] = decoratorentry
+
+def _cmdtableitem(ui, cmd, table):
+    '''Return key, value from table for cmd, or None if not found.'''
+    aliases, entry = cmdutil.findcmd(ui, cmd, table)
+    for candidatekey, candidateentry in table.iteritems():
+        if candidateentry is entry:
+            return candidatekey, entry
+
+def _colordecorator(colorfunc, nocolorfunc, ui, repo, *args, **opts):
+    '''Delegate to colorfunc or nocolorfunc, depending on conditions.
+
+    Delegate to colorfunc unless --no-color option is set or output is not
+    to a tty.
+    '''
+    if opts['no_color'] or not sys.stdout.isatty():
+        return nocolorfunc(ui, repo, *args, **opts)
+    return colorfunc(nocolorfunc, ui, repo, *args, **opts)
+
+def _configcmdeffects(ui, cmdname, effectsmap):
+    '''Override default effects for cmdname with those from .hgrc file.
+
+    Entries in the .hgrc file are in the [color] section, and look like
+    'cmdname'.'status' (for instance, 'status.modified = blue bold inverse').
+    '''
+    for status in effectsmap:
+        effects = ui.config('color', cmdname + '.' + status)
+        if effects:
+            effectsmap[status] = re.split('\W+', effects)
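
A quick sketch, not part of the patch, of what render_effects produces
(assuming hgext.color is importable from the working copy; Python 2, matching
the module above):

    from hgext.color import render_effects
    print render_effects('M mercurial/util.py', 'blue', 'bold')
    # -> '\033[34;1mM mercurial/util.py\033[39;22m'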
--- a/hgext/convert/__init__.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/convert/__init__.py	Wed Feb 06 19:57:52 2008 -0800
@@ -5,270 +5,25 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-from common import NoRepo, converter_source, converter_sink
-from cvs import convert_cvs
-from git import convert_git
-from hg import convert_mercurial
-from subversion import convert_svn
-
-import os, shutil
-from mercurial import hg, ui, util, commands
-
-commands.norepo += " convert"
-
-converters = [convert_cvs, convert_git, convert_svn, convert_mercurial]
-
-def convertsource(ui, path, **opts):
-    for c in converters:
-        if not hasattr(c, 'getcommit'):
-            continue
-        try:
-            return c(ui, path, **opts)
-        except NoRepo:
-            pass
-    raise util.Abort('%s: unknown repository type' % path)
-
-def convertsink(ui, path):
-    if not os.path.isdir(path):
-        raise util.Abort("%s: not a directory" % path)
-    for c in converters:
-        if not hasattr(c, 'putcommit'):
-            continue
-        try:
-            return c(ui, path)
-        except NoRepo:
-            pass
-    raise util.Abort('%s: unknown repository type' % path)
-
-class convert(object):
-    def __init__(self, ui, source, dest, mapfile, opts):
-
-        self.source = source
-        self.dest = dest
-        self.ui = ui
-        self.opts = opts
-        self.commitcache = {}
-        self.mapfile = mapfile
-        self.mapfilefd = None
-        self.authors = {}
-        self.authorfile = None
-
-        self.map = {}
-        try:
-            origmapfile = open(self.mapfile, 'r')
-            for l in origmapfile:
-                sv, dv = l[:-1].split()
-                self.map[sv] = dv
-            origmapfile.close()
-        except IOError:
-            pass
-
-        # Read first the dst author map if any
-        authorfile = self.dest.authorfile()
-        if authorfile and os.path.exists(authorfile):
-            self.readauthormap(authorfile)
-        # Extend/Override with new author map if necessary
-        if opts.get('authors'):
-            self.readauthormap(opts.get('authors'))
-            self.authorfile = self.dest.authorfile()
-
-    def walktree(self, heads):
-        '''Return a mapping that identifies the uncommitted parents of every
-        uncommitted changeset.'''
-        visit = heads
-        known = {}
-        parents = {}
-        while visit:
-            n = visit.pop(0)
-            if n in known or n in self.map: continue
-            known[n] = 1
-            self.commitcache[n] = self.source.getcommit(n)
-            cp = self.commitcache[n].parents
-            parents[n] = []
-            for p in cp:
-                parents[n].append(p)
-                visit.append(p)
-
-        return parents
-
-    def toposort(self, parents):
-        '''Return an ordering such that every uncommitted changeset is
-        preceeded by all its uncommitted ancestors.'''
-        visit = parents.keys()
-        seen = {}
-        children = {}
-
-        while visit:
-            n = visit.pop(0)
-            if n in seen: continue
-            seen[n] = 1
-            # Ensure that nodes without parents are present in the 'children'
-            # mapping.
-            children.setdefault(n, [])
-            for p in parents[n]:
-                if not p in self.map:
-                    visit.append(p)
-                children.setdefault(p, []).append(n)
-
-        s = []
-        removed = {}
-        visit = children.keys()
-        while visit:
-            n = visit.pop(0)
-            if n in removed: continue
-            dep = 0
-            if n in parents:
-                for p in parents[n]:
-                    if p in self.map: continue
-                    if p not in removed:
-                        # we're still dependent
-                        visit.append(n)
-                        dep = 1
-                        break
+import convcmd
+from mercurial import commands
 
-            if not dep:
-                # all n's parents are in the list
-                removed[n] = 1
-                if n not in self.map:
-                    s.append(n)
-                if n in children:
-                    for c in children[n]:
-                        visit.insert(0, c)
-
-        if self.opts.get('datesort'):
-            depth = {}
-            for n in s:
-                depth[n] = 0
-                pl = [p for p in self.commitcache[n].parents
-                      if p not in self.map]
-                if pl:
-                    depth[n] = max([depth[p] for p in pl]) + 1
-
-            s = [(depth[n], self.commitcache[n].date, n) for n in s]
-            s.sort()
-            s = [e[2] for e in s]
-
-        return s
-
-    def mapentry(self, src, dst):
-        if self.mapfilefd is None:
-            try:
-                self.mapfilefd = open(self.mapfile, "a")
-            except IOError, (errno, strerror):
-                raise util.Abort("Could not open map file %s: %s, %s\n" % (self.mapfile, errno, strerror))
-        self.map[src] = dst
-        self.mapfilefd.write("%s %s\n" % (src, dst))
-        self.mapfilefd.flush()
-
-    def writeauthormap(self):
-        authorfile = self.authorfile
-        if authorfile:
-           self.ui.status('Writing author map file %s\n' % authorfile)
-           ofile = open(authorfile, 'w+')
-           for author in self.authors:
-               ofile.write("%s=%s\n" % (author, self.authors[author]))
-           ofile.close()
-
-    def readauthormap(self, authorfile):
-        afile = open(authorfile, 'r')
-        for line in afile:
-            try:
-                srcauthor = line.split('=')[0].strip()
-                dstauthor = line.split('=')[1].strip()
-                if srcauthor in self.authors and dstauthor != self.authors[srcauthor]:
-                    self.ui.status(
-                        'Overriding mapping for author %s, was %s, will be %s\n'
-                        % (srcauthor, self.authors[srcauthor], dstauthor))
-                else:
-                    self.ui.debug('Mapping author %s to %s\n'
-                                  % (srcauthor, dstauthor))
-                    self.authors[srcauthor] = dstauthor
-            except IndexError:
-                self.ui.warn(
-                    'Ignoring bad line in author file map %s: %s\n'
-                    % (authorfile, line))
-        afile.close()
-
-    def copy(self, rev):
-        c = self.commitcache[rev]
-        files = self.source.getchanges(rev)
+# Command definitions were moved elsewhere to ease demand loading.
 
-        do_copies = (hasattr(c, 'copies') and hasattr(self.dest, 'copyfile'))
-
-        for f, v in files:
-            try:
-                data = self.source.getfile(f, v)
-            except IOError, inst:
-                self.dest.delfile(f)
-            else:
-                e = self.source.getmode(f, v)
-                self.dest.putfile(f, e, data)
-                if do_copies:
-                    if f in c.copies:
-                        # Merely marks that a copy happened.
-                        self.dest.copyfile(c.copies[f], f)
-
-
-        r = [self.map[v] for v in c.parents]
-        f = [f for f, v in files]
-        newnode = self.dest.putcommit(f, r, c)
-        self.mapentry(rev, newnode)
-
-    def convert(self):
-        try:
-            self.source.setrevmap(self.map)
-            self.ui.status("scanning source...\n")
-            heads = self.source.getheads()
-            parents = self.walktree(heads)
-            self.ui.status("sorting...\n")
-            t = self.toposort(parents)
-            num = len(t)
-            c = None
-
-            self.ui.status("converting...\n")
-            for c in t:
-                num -= 1
-                desc = self.commitcache[c].desc
-                if "\n" in desc:
-                    desc = desc.splitlines()[0]
-                author = self.commitcache[c].author
-                author = self.authors.get(author, author)
-                self.commitcache[c].author = author
-                self.ui.status("%d %s\n" % (num, desc))
-                self.copy(c)
-
-            tags = self.source.gettags()
-            ctags = {}
-            for k in tags:
-                v = tags[k]
-                if v in self.map:
-                    ctags[k] = self.map[v]
-
-            if c and ctags:
-                nrev = self.dest.puttags(ctags)
-                # write another hash correspondence to override the previous
-                # one so we don't end up with extra tag heads
-                if nrev:
-                    self.mapentry(c, nrev)
-
-            self.writeauthormap()
-        finally:
-            self.cleanup()
-
-    def cleanup(self):
-       if self.mapfilefd:
-           self.mapfilefd.close()
-
-def _convert(ui, src, dest=None, mapfile=None, **opts):
+def convert(ui, src, dest=None, revmapfile=None, **opts):
     """Convert a foreign SCM repository to a Mercurial one.
 
     Accepted source formats:
-    - GIT
+    - Mercurial
     - CVS
-    - SVN
+    - Darcs
+    - git
+    - Subversion
+    - GNU Arch
 
     Accepted destination formats:
     - Mercurial
+    - Subversion (history on branches is not preserved)
 
     If no revision is given, all revisions will be converted. Otherwise,
     convert will only import up to the named revision (given in a format
@@ -278,8 +33,8 @@
     basename of the source with '-hg' appended.  If the destination
     repository doesn't exist, it will be created.
 
-    If <mapfile> isn't given, it will be put in a default location
-    (<dest>/.hg/shamap by default).  The <mapfile> is a simple text
+    If <MAPFILE> isn't given, it will be put in a default location
+    (<dest>/.hg/shamap by default).  The <MAPFILE> is a simple text
     file that maps each source commit ID to the destination ID for
     that revision, like so:
     <source ID> <destination ID>
@@ -293,61 +48,62 @@
     that use unix logins to identify authors (eg: CVS). One line per author
     mapping and the line format is:
     srcauthor=whatever string you want
-    """
 
-    util._encoding = 'UTF-8'
+    The filemap is a file that allows filtering and remapping of files
+    and directories.  Comment lines start with '#'.  Each line can
+    contain one of the following directives:
+
+      include path/to/file
+
+      exclude path/to/file
 
-    if not dest:
-        dest = hg.defaultdest(src) + "-hg"
-        ui.status("assuming destination %s\n" % dest)
+      rename from/file to/file
+
+    The 'include' directive causes a file, or all files under a
+    directory, to be included in the destination repository, and the
+    exclusion of all other files and dirs not explicitly included.
+    The 'exclude' directive causes files or directories to be omitted.
+    The 'rename' directive renames a file or directory.  To rename from a
+    subdirectory into the root of the repository, use '.' as the path to
+    rename to.
+
+    Back end options:
 
-    # Try to be smart and initalize things when required
-    created = False
-    if os.path.isdir(dest):
-        if len(os.listdir(dest)) > 0:
-            try:
-                hg.repository(ui, dest)
-                ui.status("destination %s is a Mercurial repository\n" % dest)
-            except hg.RepoError:
-                raise util.Abort(
-                    "destination directory %s is not empty.\n"
-                    "Please specify an empty directory to be initialized\n"
-                    "or an already initialized mercurial repository"
-                    % dest)
-        else:
-            ui.status("initializing destination %s repository\n" % dest)
-            hg.repository(ui, dest, create=True)
-            created = True
-    elif os.path.exists(dest):
-        raise util.Abort("destination %s exists and is not a directory" % dest)
-    else:
-        ui.status("initializing destination %s repository\n" % dest)
-        hg.repository(ui, dest, create=True)
-        created = True
+    --config convert.hg.clonebranches=False   (boolean)
+        hg target: XXX not documented
+    --config convert.hg.saverev=True          (boolean)
+        hg source: allow target to preserve source revision ID
+    --config convert.hg.tagsbranch=default    (branch name)
+        hg target: XXX not documented
+    --config convert.hg.usebranchnames=True   (boolean)
+        hg target: preserve branch names
 
-    destc = convertsink(ui, dest)
-
-    try:
-        srcc = convertsource(ui, src, rev=opts.get('rev'))
-    except Exception:
-        if created:
-            shutil.rmtree(dest, True)
-        raise
+    --config convert.svn.branches=branches    (directory name)
+        svn source: specify the directory containing branches
+    --config convert.svn.tags=tags            (directory name)
+        svn source: specify the directory containing tags
+    --config convert.svn.trunk=trunk          (directory name)
+        svn source: specify the name of the trunk branch
+    """
+    return convcmd.convert(ui, src, dest, revmapfile, **opts)
 
-    if not mapfile:
-        try:
-            mapfile = destc.mapfile()
-        except:
-            mapfile = os.path.join(destc, "map")
+def debugsvnlog(ui, **opts):
+    return convcmd.debugsvnlog(ui, **opts)
 
-    c = convert(ui, srcc, destc, mapfile, opts)
-    c.convert()
+commands.norepo += " convert debugsvnlog"
 
 cmdtable = {
     "convert":
-        (_convert,
+        (convert,
          [('A', 'authors', '', 'username mapping filename'),
+          ('d', 'dest-type', '', 'destination repository type'),
+          ('', 'filemap', '', 'remap file names using contents of file'),
           ('r', 'rev', '', 'import up to target revision REV'),
+          ('s', 'source-type', '', 'source repository type'),
           ('', 'datesort', None, 'try to sort changesets by date')],
          'hg convert [OPTION]... SOURCE [DEST [MAPFILE]]'),
+    "debugsvnlog":
+        (debugsvnlog,
+         [],
+         'hg debugsvnlog'),
 }
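
A sketch of how the new options fit together (repository paths, the filemap
name and its contents are hypothetical):

    $ cat > project.filemap <<EOF
    # keep only the library, drop its tests, hoist it to the repository root
    include lib
    exclude lib/tests
    rename lib .
    EOF
    $ hg convert --filemap project.filemap /path/to/svn-checkout project-hg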
--- a/hgext/convert/common.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/convert/common.py	Wed Feb 06 19:57:52 2008 -0800
@@ -1,23 +1,47 @@
 # common code for the convert extension
+import base64, errno
+import os
+import cPickle as pickle
+from mercurial import util
+from mercurial.i18n import _
+
+def encodeargs(args):
+    def encodearg(s):
+        lines = base64.encodestring(s)
+        lines = [l.splitlines()[0] for l in lines]
+        return ''.join(lines)
+
+    s = pickle.dumps(args)
+    return encodearg(s)
+
+def decodeargs(s):
+    s = base64.decodestring(s)
+    return pickle.loads(s)
+
+def checktool(exe, name=None):
+    name = name or exe
+    if not util.find_exe(exe):
+        raise util.Abort('cannot find required "%s" tool' % name)
 
 class NoRepo(Exception): pass
 
-class commit(object):
-    def __init__(self, **parts):
-        self.rev = None
-        self.branch = None
+SKIPREV = 'SKIP'
 
-        for x in "author date desc parents".split():
-            if not x in parts:
-                raise util.Abort("commit missing field %s" % x)
-        self.__dict__.update(parts)
-        if not self.desc or self.desc.isspace():
-            self.desc = '*** empty log message ***'
+class commit(object):
+    def __init__(self, author, date, desc, parents, branch=None, rev=None,
+                 extra={}):
+        self.author = author or 'unknown'
+        self.date = date or '0 0'
+        self.desc = desc
+        self.parents = parents
+        self.branch = branch
+        self.rev = rev
+        self.extra = extra
 
 class converter_source(object):
     """Conversion source interface"""
 
-    def __init__(self, ui, path, rev=None):
+    def __init__(self, ui, path=None, rev=None):
         """Initialize conversion source (or raise NoRepo("message")
         exception if path is not a valid repository)"""
         self.ui = ui
@@ -26,6 +50,12 @@
 
         self.encoding = 'utf-8'
 
+    def before(self):
+        pass
+
+    def after(self):
+        pass
+
     def setrevmap(self, revmap):
         """set the map of already-converted revisions"""
         pass
@@ -43,10 +73,12 @@
         raise NotImplementedError()
 
     def getchanges(self, version):
-        """Return sorted list of (filename, id) tuples for all files changed in rev.
+        """Returns a tuple of (files, copies)
+        Files is a sorted list of (filename, id) tuples for all files changed
+        in version, where id is the source revision id of the file.
 
-        id just tells us which revision to return in getfile(), e.g. in
-        git it's an object hash."""
+        copies is a dictionary of dest: source
+        """
         raise NotImplementedError()
 
     def getcommit(self, version):
@@ -61,6 +93,8 @@
         if not encoding:
             encoding = self.encoding or 'utf-8'
 
+        if isinstance(s, unicode):
+            return s.encode("utf-8")
         try:
             return s.decode(encoding).encode("utf-8")
         except:
@@ -69,19 +103,42 @@
             except:
                 return s.decode(encoding, "replace").encode("utf-8")
 
+    def getchangedfiles(self, rev, i):
+        """Return the files changed by rev compared to parent[i].
+
+        i is an index selecting one of the parents of rev.  The return
+        value should be the list of files that are different in rev and
+        this parent.
+
+        If rev has no parents, i is None.
+
+        This function is only needed to support --filemap
+        """
+        raise NotImplementedError()
+
+    def converted(self, rev, sinkrev):
+        '''Notify the source that a revision has been converted.'''
+        pass
+
+
 class converter_sink(object):
     """Conversion sink (target) interface"""
 
     def __init__(self, ui, path):
         """Initialize conversion sink (or raise NoRepo("message")
-        exception if path is not a valid repository)"""
-        raise NotImplementedError()
+        exception if path is not a valid repository)
+
+        created is a list of paths to remove if a fatal error occurs
+        later"""
+        self.ui = ui
+        self.path = path
+        self.created = []
 
     def getheads(self):
         """Return a list of this repository's heads"""
         raise NotImplementedError()
 
-    def mapfile(self):
+    def revmapfile(self):
         """Path to a file that will contain lines
         source_rev_id sink_rev_id
         mapping equivalent revision identifiers for each system."""
@@ -119,3 +176,179 @@
         """Put tags into sink.
         tags: {tagname: sink_rev_id, ...}"""
         raise NotImplementedError()
+
+    def setbranch(self, branch, pbranches):
+        """Set the current branch name. Called before the first putfile
+        on the branch.
+        branch: branch name for subsequent commits
+        pbranches: (converted parent revision, parent branch) tuples"""
+        pass
+
+    def setfilemapmode(self, active):
+        """Tell the destination that we're using a filemap
+
+        Some converter_sources (svn in particular) can claim that a file
+        was changed in a revision, even if there was no change.  This method
+        tells the destination that we're using a filemap and that it should
+        filter empty revisions.
+        """
+        pass
+
+    def before(self):
+        pass
+
+    def after(self):
+        pass
+
+
+class commandline(object):
+    def __init__(self, ui, command):
+        self.ui = ui
+        self.command = command
+
+    def prerun(self):
+        pass
+
+    def postrun(self):
+        pass
+
+    def _cmdline(self, cmd, *args, **kwargs):
+        cmdline = [self.command, cmd] + list(args)
+        for k, v in kwargs.iteritems():
+            if len(k) == 1:
+                cmdline.append('-' + k)
+            else:
+                cmdline.append('--' + k.replace('_', '-'))
+            try:
+                if len(k) == 1:
+                    cmdline.append('' + v)
+                else:
+                    cmdline[-1] += '=' + v
+            except TypeError:
+                pass
+        cmdline = [util.shellquote(arg) for arg in cmdline]
+        cmdline += ['2>', util.nulldev, '<', util.nulldev]
+        cmdline = ' '.join(cmdline)
+        self.ui.debug(cmdline, '\n')
+        return cmdline
+
+    def _run(self, cmd, *args, **kwargs):
+        cmdline = self._cmdline(cmd, *args, **kwargs)
+        self.prerun()
+        try:
+            return util.popen(cmdline)
+        finally:
+            self.postrun()
+
+    def run(self, cmd, *args, **kwargs):
+        fp = self._run(cmd, *args, **kwargs)
+        output = fp.read()
+        self.ui.debug(output)
+        return output, fp.close()
+
+    def runlines(self, cmd, *args, **kwargs):
+        fp = self._run(cmd, *args, **kwargs)
+        output = fp.readlines()
+        self.ui.debug(output)
+        return output, fp.close()
+
+    def checkexit(self, status, output=''):
+        if status:
+            if output:
+                self.ui.warn(_('%s error:\n') % self.command)
+                self.ui.warn(output)
+            msg = util.explain_exit(status)[0]
+            raise util.Abort(_('%s %s') % (self.command, msg))
+
+    def run0(self, cmd, *args, **kwargs):
+        output, status = self.run(cmd, *args, **kwargs)
+        self.checkexit(status, output)
+        return output
+
+    def runlines0(self, cmd, *args, **kwargs):
+        output, status = self.runlines(cmd, *args, **kwargs)
+        self.checkexit(status, output)
+        return output
+
+    def getargmax(self):
+        if '_argmax' in self.__dict__:
+            return self._argmax
+
+        # POSIX requires at least 4096 bytes for ARG_MAX
+        self._argmax = 4096
+        try:
+            self._argmax = os.sysconf("SC_ARG_MAX")
+        except:
+            pass
+
+        # Windows shells impose their own limits on command line length,
+        # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
+        # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
+        # details about cmd.exe limitations.
+
+        # Since ARG_MAX is for command line _and_ environment, lower our limit
+        # (and make Windows shells happy while doing this).
+
+        self._argmax = self._argmax/2 - 1
+        return self._argmax
+
+    def limit_arglist(self, arglist, cmd, *args, **kwargs):
+        limit = self.getargmax() - len(self._cmdline(cmd, *args, **kwargs))
+        bytes = 0
+        fl = []
+        for fn in arglist:
+            b = len(fn) + 3
+            if bytes + b < limit or len(fl) == 0:
+                fl.append(fn)
+                bytes += b
+            else:
+                yield fl
+                fl = [fn]
+                bytes = b
+        if fl:
+            yield fl
+
+    def xargs(self, arglist, cmd, *args, **kwargs):
+        for l in self.limit_arglist(arglist, cmd, *args, **kwargs):
+            self.run0(cmd, *(list(args) + l), **kwargs)
+
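The getargmax/limit_arglist machinery above batches long file lists so that each spawned command stays under the (halved) ARG_MAX budget. A minimal standalone sketch of the same splitting idea, using an invented byte limit rather than a real ARG_MAX query:

    # Standalone sketch of the limit_arglist splitting; 'limit' is an
    # arbitrary illustrative value, not a real ARG_MAX measurement.
    def chunk_args(arglist, limit=20):
        size, chunk = 0, []
        for name in arglist:
            cost = len(name) + 3        # argument plus quoting/separator slack
            if size + cost < limit or not chunk:
                chunk.append(name)
                size += cost
            else:
                yield chunk
                chunk, size = [name], cost
        if chunk:
            yield chunk

    # prints [['a.txt', 'b.txt'], ['c.txt', 'd.txt']]
    print list(chunk_args(['a.txt', 'b.txt', 'c.txt', 'd.txt']))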
+class mapfile(dict):
+    def __init__(self, ui, path):
+        super(mapfile, self).__init__()
+        self.ui = ui
+        self.path = path
+        self.fp = None
+        self.order = []
+        self._read()
+
+    def _read(self):
+        if self.path is None:
+            return
+        try:
+            fp = open(self.path, 'r')
+        except IOError, err:
+            if err.errno != errno.ENOENT:
+                raise
+            return
+        for line in fp:
+            key, value = line[:-1].split(' ', 1)
+            if key not in self:
+                self.order.append(key)
+            super(mapfile, self).__setitem__(key, value)
+        fp.close()
+
+    def __setitem__(self, key, value):
+        if self.fp is None:
+            try:
+                self.fp = open(self.path, 'a')
+            except IOError, err:
+                raise util.Abort(_('could not open map file %r: %s') %
+                                 (self.path, err.strerror))
+        self.fp.write('%s %s\n' % (key, value))
+        self.fp.flush()
+        super(mapfile, self).__setitem__(key, value)
+
+    def close(self):
+        if self.fp:
+            self.fp.close()
+            self.fp = None
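The mapfile class above persists its entries as plain 'key value' lines appended to the file, which is how the revision map and splice map survive an interrupted conversion and can be re-read on restart. A small standalone sketch of that on-disk format; the file name and revision ids are placeholders:

    # Write and re-read the 'key value\n' format used by mapfile.
    entries = [('srcrev1', 'dstrev1'), ('srcrev2', 'dstrev2')]
    fp = open('example.map', 'a')
    for key, value in entries:
        fp.write('%s %s\n' % (key, value))
    fp.close()

    mapping = {}
    for line in open('example.map'):
        key, value = line[:-1].split(' ', 1)
        mapping[key] = value
    print mapping       # {'srcrev1': 'dstrev1', 'srcrev2': 'dstrev2'}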
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/convert/convcmd.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,330 @@
+# convcmd - convert extension commands definition
+#
+# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from common import NoRepo, SKIPREV, converter_source, converter_sink, mapfile
+from cvs import convert_cvs
+from darcs import darcs_source
+from git import convert_git
+from hg import mercurial_source, mercurial_sink
+from subversion import debugsvnlog, svn_source, svn_sink
+from gnuarch import gnuarch_source
+import filemap
+
+import os, shutil
+from mercurial import hg, util
+from mercurial.i18n import _
+
+source_converters = [
+    ('cvs', convert_cvs),
+    ('git', convert_git),
+    ('svn', svn_source),
+    ('hg', mercurial_source),
+    ('darcs', darcs_source),
+    ('gnuarch', gnuarch_source),
+    ]
+
+sink_converters = [
+    ('hg', mercurial_sink),
+    ('svn', svn_sink),
+    ]
+
+def convertsource(ui, path, type, rev):
+    exceptions = []
+    for name, source in source_converters:
+        try:
+            if not type or name == type:
+                return source(ui, path, rev)
+        except NoRepo, inst:
+            exceptions.append(inst)
+    if not ui.quiet:
+        for inst in exceptions:
+            ui.write(_("%s\n") % inst)
+    raise util.Abort('%s: unknown repository type' % path)
+
+def convertsink(ui, path, type):
+    for name, sink in sink_converters:
+        try:
+            if not type or name == type:
+                return sink(ui, path)
+        except NoRepo, inst:
+            ui.note(_("convert: %s\n") % inst)
+    raise util.Abort('%s: unknown repository type' % path)
+
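convertsource and convertsink walk the registered (name, class) pairs and return the first converter whose constructor accepts the path, treating NoRepo as "not mine". A stripped-down sketch of that dispatch pattern, with invented converter classes standing in for the real ones:

    # Invented stand-ins; only the dispatch pattern mirrors
    # convertsource/convertsink above.
    class NotMine(Exception):
        pass

    class foo_source(object):
        def __init__(self, path):
            if not path.endswith('.foo'):
                raise NotMine(path)

    class bar_source(object):
        def __init__(self, path):
            if not path.endswith('.bar'):
                raise NotMine(path)

    def picksource(path, type=None):
        for name, source in [('foo', foo_source), ('bar', bar_source)]:
            try:
                if not type or name == type:
                    return source(path)
            except NotMine:
                pass
        raise RuntimeError('%s: unknown repository type' % path)

    print picksource('repo.bar')    # a bar_source instance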
+class converter(object):
+    def __init__(self, ui, source, dest, revmapfile, opts):
+
+        self.source = source
+        self.dest = dest
+        self.ui = ui
+        self.opts = opts
+        self.commitcache = {}
+        self.authors = {}
+        self.authorfile = None
+
+        self.map = mapfile(ui, revmapfile)
+
+        # Read first the dst author map if any
+        authorfile = self.dest.authorfile()
+        if authorfile and os.path.exists(authorfile):
+            self.readauthormap(authorfile)
+        # Extend/Override with new author map if necessary
+        if opts.get('authors'):
+            self.readauthormap(opts.get('authors'))
+            self.authorfile = self.dest.authorfile()
+
+        self.splicemap = mapfile(ui, ui.config('convert', 'splicemap'))
+
+    def walktree(self, heads):
+        '''Return a mapping that identifies the uncommitted parents of every
+        uncommitted changeset.'''
+        visit = heads
+        known = {}
+        parents = {}
+        while visit:
+            n = visit.pop(0)
+            if n in known or n in self.map: continue
+            known[n] = 1
+            commit = self.cachecommit(n)
+            parents[n] = []
+            for p in commit.parents:
+                parents[n].append(p)
+                visit.append(p)
+
+        return parents
+
+    def toposort(self, parents):
+        '''Return an ordering such that every uncommitted changeset is
+        preceded by all its uncommitted ancestors.'''
+        visit = parents.keys()
+        seen = {}
+        children = {}
+
+        while visit:
+            n = visit.pop(0)
+            if n in seen: continue
+            seen[n] = 1
+            # Ensure that nodes without parents are present in the 'children'
+            # mapping.
+            children.setdefault(n, [])
+            for p in parents[n]:
+                if not p in self.map:
+                    visit.append(p)
+                children.setdefault(p, []).append(n)
+
+        s = []
+        removed = {}
+        visit = children.keys()
+        while visit:
+            n = visit.pop(0)
+            if n in removed: continue
+            dep = 0
+            if n in parents:
+                for p in parents[n]:
+                    if p in self.map: continue
+                    if p not in removed:
+                        # we're still dependent
+                        visit.append(n)
+                        dep = 1
+                        break
+
+            if not dep:
+                # all n's parents are in the list
+                removed[n] = 1
+                if n not in self.map:
+                    s.append(n)
+                if n in children:
+                    for c in children[n]:
+                        visit.insert(0, c)
+
+        if self.opts.get('datesort'):
+            depth = {}
+            for n in s:
+                depth[n] = 0
+                pl = [p for p in self.commitcache[n].parents
+                      if p not in self.map]
+                if pl:
+                    depth[n] = max([depth[p] for p in pl]) + 1
+
+            s = [(depth[n], util.parsedate(self.commitcache[n].date), n)
+                 for n in s]
+            s.sort()
+            s = [e[2] for e in s]
+
+        return s
+
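As a concrete illustration of the contract toposort guarantees, a diamond-shaped history comes out root first and merge last: every changeset follows all of its ancestors. A tiny self-contained sketch with invented revision names (the naive retry loop below assumes the graph has no cycles):

    # Expected ordering for a diamond: A before B and C, D last.
    parents = {'A': [], 'B': ['A'], 'C': ['A'], 'D': ['B', 'C']}

    def simple_toposort(parents):
        order, done = [], {}
        pending = sorted(parents)
        while pending:
            n = pending.pop(0)
            if [p for p in parents[n] if p not in done]:
                pending.append(n)       # some parent not emitted yet, retry
            else:
                order.append(n)
                done[n] = 1
        return order

    print simple_toposort(parents)      # ['A', 'B', 'C', 'D']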
+    def writeauthormap(self):
+        authorfile = self.authorfile
+        if authorfile:
+            self.ui.status('Writing author map file %s\n' % authorfile)
+            ofile = open(authorfile, 'w+')
+            for author in self.authors:
+                ofile.write("%s=%s\n" % (author, self.authors[author]))
+            ofile.close()
+
+    def readauthormap(self, authorfile):
+        afile = open(authorfile, 'r')
+        for line in afile:
+            try:
+                srcauthor = line.split('=')[0].strip()
+                dstauthor = line.split('=')[1].strip()
+                if srcauthor in self.authors and dstauthor != self.authors[srcauthor]:
+                    self.ui.status(
+                        'Overriding mapping for author %s, was %s, will be %s\n'
+                        % (srcauthor, self.authors[srcauthor], dstauthor))
+                else:
+                    self.ui.debug('Mapping author %s to %s\n'
+                                  % (srcauthor, dstauthor))
+                    self.authors[srcauthor] = dstauthor
+            except IndexError:
+                self.ui.warn(
+                    'Ignoring bad line in author file map %s: %s\n'
+                    % (authorfile, line))
+        afile.close()
+
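readauthormap expects one 'source author=destination author' mapping per line, and later files extend or override earlier ones, as the constructor above does with the sink's author file before any --authors file. A small sketch of what such a file might contain and how the split works; the names are invented:

    # Illustrative author map content; both sides of '=' are placeholders.
    sample = ("joe=Joe Hacker <joe@example.com>\n"
              "jane=Jane Doe <jane@example.com>\n")
    authors = {}
    for line in sample.splitlines():
        src, dst = line.split('=', 1)   # split source and destination at '='
        authors[src.strip()] = dst.strip()
    print authors['joe']                # Joe Hacker <joe@example.com>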
+    def cachecommit(self, rev):
+        commit = self.source.getcommit(rev)
+        commit.author = self.authors.get(commit.author, commit.author)
+        self.commitcache[rev] = commit
+        return commit
+
+    def copy(self, rev):
+        commit = self.commitcache[rev]
+        do_copies = hasattr(self.dest, 'copyfile')
+        filenames = []
+
+        changes = self.source.getchanges(rev)
+        if isinstance(changes, basestring):
+            if changes == SKIPREV:
+                dest = SKIPREV
+            else:
+                dest = self.map[changes]
+            self.map[rev] = dest
+            return
+        files, copies = changes
+        pbranches = []
+        if commit.parents:
+            for prev in commit.parents:
+                if prev not in self.commitcache:
+                    self.cachecommit(prev)
+                pbranches.append((self.map[prev],
+                                  self.commitcache[prev].branch))
+        self.dest.setbranch(commit.branch, pbranches)
+        for f, v in files:
+            filenames.append(f)
+            try:
+                data = self.source.getfile(f, v)
+            except IOError, inst:
+                self.dest.delfile(f)
+            else:
+                e = self.source.getmode(f, v)
+                self.dest.putfile(f, e, data)
+                if do_copies:
+                    if f in copies:
+                        copyf = copies[f]
+                        # Merely marks that a copy happened.
+                        self.dest.copyfile(copyf, f)
+
+        try:
+            parents = [self.splicemap[rev]]
+            self.ui.debug('spliced in %s as parents of %s\n' %
+                          (parents, rev))
+        except KeyError:
+            parents = [b[0] for b in pbranches]
+        newnode = self.dest.putcommit(filenames, parents, commit)
+        self.source.converted(rev, newnode)
+        self.map[rev] = newnode
+
+    def convert(self):
+
+        def recode(s):
+            return s.decode('utf-8').encode(orig_encoding, 'replace')
+
+        try:
+            self.source.before()
+            self.dest.before()
+            self.source.setrevmap(self.map)
+            self.ui.status("scanning source...\n")
+            heads = self.source.getheads()
+            parents = self.walktree(heads)
+            self.ui.status("sorting...\n")
+            t = self.toposort(parents)
+            num = len(t)
+            c = None
+
+            self.ui.status("converting...\n")
+            for c in t:
+                num -= 1
+                desc = self.commitcache[c].desc
+                if "\n" in desc:
+                    desc = desc.splitlines()[0]
+                # convert the log message to the local encoding without
+                # using tolocal() because convert() has overridden
+                # util._encoding to 'utf-8'
+                self.ui.status("%d %s\n" % (num, recode(desc)))
+                self.ui.note(_("source: %s\n" % recode(c)))
+                self.copy(c)
+
+            tags = self.source.gettags()
+            ctags = {}
+            for k in tags:
+                v = tags[k]
+                if self.map.get(v, SKIPREV) != SKIPREV:
+                    ctags[k] = self.map[v]
+
+            if c and ctags:
+                nrev = self.dest.puttags(ctags)
+                # write another hash correspondence to override the previous
+                # one so we don't end up with extra tag heads
+                if nrev:
+                    self.map[c] = nrev
+
+            self.writeauthormap()
+        finally:
+            self.cleanup()
+
+    def cleanup(self):
+        try:
+            self.dest.after()
+        finally:
+            self.source.after()
+        self.map.close()
+
+orig_encoding = 'ascii'
+
+def convert(ui, src, dest=None, revmapfile=None, **opts):
+    global orig_encoding
+    orig_encoding = util._encoding
+    util._encoding = 'UTF-8'
+
+    if not dest:
+        dest = hg.defaultdest(src) + "-hg"
+        ui.status("assuming destination %s\n" % dest)
+
+    destc = convertsink(ui, dest, opts.get('dest_type'))
+
+    try:
+        srcc = convertsource(ui, src, opts.get('source_type'),
+                             opts.get('rev'))
+    except Exception:
+        for path in destc.created:
+            shutil.rmtree(path, True)
+        raise
+
+    fmap = opts.get('filemap')
+    if fmap:
+        srcc = filemap.filemap_source(ui, srcc, fmap)
+        destc.setfilemapmode(True)
+
+    if not revmapfile:
+        try:
+            revmapfile = destc.revmapfile()
+        except:
+            revmapfile = os.path.join(dest, "map")
+
+    c = converter(ui, srcc, destc, revmapfile, opts)
+    c.convert()
+
--- a/hgext/convert/cvs.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/convert/cvs.py	Wed Feb 06 19:57:52 2008 -0800
@@ -1,9 +1,10 @@
 # CVS conversion code inspired by hg-cvs-import and git-cvsimport
 
 import os, locale, re, socket
+from cStringIO import StringIO
 from mercurial import util
 
-from common import NoRepo, commit, converter_source
+from common import NoRepo, commit, converter_source, checktool
 
 class convert_cvs(converter_source):
     def __init__(self, ui, path, rev=None):
@@ -11,7 +12,10 @@
 
         cvs = os.path.join(path, "CVS")
         if not os.path.exists(cvs):
-            raise NoRepo("couldn't open CVS repo %s" % path)
+            raise NoRepo("%s does not look like a CVS checkout" % path)
+
+        for tool in ('cvsps', 'cvs'):
+            checktool(tool)
 
         self.changeset = {}
         self.files = {}
@@ -40,7 +44,7 @@
                 try:
                     # date
                     util.parsedate(self.rev, ['%Y/%m/%d %H:%M:%S'])
-                    cmd = "%s -d '1970/01/01 00:00:01' -d '%s'" % (cmd, self.rev)
+                    cmd = '%s -d "1970/01/01 00:00:01" -d "%s"' % (cmd, self.rev)
                 except util.Abort:
                     raise util.Abort('revision %s is not a patchset number or date' % self.rev)
 
@@ -49,11 +53,13 @@
             os.chdir(self.path)
             id = None
             state = 0
-            for l in os.popen(cmd):
+            filerevids = {}
+            for l in util.popen(cmd):
                 if state == 0: # header
                     if l.startswith("PatchSet"):
                         id = l[9:-2]
                         if maxrev and int(id) > maxrev:
+                            # ignore everything
                             state = 3
                     elif l.startswith("Date"):
                         date = util.parsedate(l[6:-1], ["%Y/%m/%d %H:%M:%S"])
@@ -64,7 +70,8 @@
                         self.lastbranch[branch] = id
                     elif l.startswith("Ancestor branch"):
                         ancestor = l[17:-1]
-                        self.parent[id] = self.lastbranch[ancestor]
+                        # figure out the parent later
+                        self.parent[id] = None
                     elif l.startswith("Author"):
                         author = self.recode(l[8:-1])
                     elif l.startswith("Tag:") or l.startswith("Tags:"):
@@ -73,23 +80,36 @@
                         if (len(t) > 1) or (t[0] and (t[0] != "(none)")):
                             self.tags.update(dict.fromkeys(t, id))
                     elif l.startswith("Log:"):
+                        # switch to gathering log
                         state = 1
                         log = ""
                 elif state == 1: # log
                     if l == "Members: \n":
+                        # switch to gathering members
                         files = {}
+                        oldrevs = []
                         log = self.recode(log[:-1])
                         state = 2
                     else:
+                        # gather log
                         log += l
-                elif state == 2:
-                    if l == "\n": #
+                elif state == 2: # members
+                    if l == "\n": # start of next entry
                         state = 0
                         p = [self.parent[id]]
                         if id == "1":
                             p = []
                         if branch == "HEAD":
                             branch = ""
+                        if branch and p[0] is None:
+                            latest = None
+                            # the last changeset that contains a base
+                            # file is our parent
+                            for r in oldrevs:
+                                latest = max(filerevids[r], latest)
+                            p = [latest]
+
+                        # add current commit to set
                         c = commit(author=author, date=date, parents=p,
                                    desc=log, branch=branch)
                         self.changeset[id] = c
@@ -98,9 +118,14 @@
                         colon = l.rfind(':')
                         file = l[1:colon]
                         rev = l[colon+1:-2]
-                        rev = rev.split("->")[1]
+                        oldrev, rev = rev.split("->")
                         files[file] = rev
+
+                        # save some information for identifying branch points
+                        oldrevs.append("%s:%s" % (oldrev, file))
+                        filerevids["%s:%s" % (rev, file)] = id
                 elif state == 3:
+                    # swallow all input
                     continue
 
             self.heads = self.lastbranch.values()
@@ -124,23 +149,27 @@
                 user, passw, serv, port, root = m.groups()
                 if not user:
                     user = "anonymous"
-                rr = ":pserver:" + user + "@" + serv + ":" +  root
-                if port:
-                    rr2, port = "-", int(port)
+                if not port:
+                    port = 2401
                 else:
-                    rr2, port = rr, 2401
-                rr += str(port)
+                    port = int(port)
+                format0 = ":pserver:%s@%s:%s" % (user, serv, root)
+                format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)
 
                 if not passw:
                     passw = "A"
                     pf = open(os.path.join(os.environ["HOME"], ".cvspass"))
-                    for l in pf:
-                        # :pserver:cvs@mea.tmt.tele.fi:/cvsroot/zmailer Ah<Z
-                        m = re.match(r'(/\d+\s+/)?(.*)', l)
-                        l = m.group(2)
-                        w, p = l.split(' ', 1)
-                        if w in [rr, rr2]:
-                            passw = p
+                    for line in pf.read().splitlines():
+                        part1, part2 = line.split(' ', 1)
+                        if part1 == '/1':
+                            # /1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z
+                            part1, part2 = part2.split(' ', 1)
+                            format = format1
+                        else:
+                            # :pserver:user@example.com:/cvsroot/foo Ah<Z
+                            format = format0
+                        if part1 == format:
+                            passw = part2
                             break
                     pf.close()
 
@@ -149,7 +178,7 @@
                 sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
                                     "END AUTH REQUEST", ""]))
                 if sck.recv(128) != "I LOVE YOU\n":
-                    raise NoRepo("CVS pserver authentication failed")
+                    raise util.Abort("CVS pserver authentication failed")
 
                 self.writep = self.readp = sck.makefile('r+')
 
@@ -162,7 +191,8 @@
             if root.startswith(":ext:"):
                 root = root[5:]
             m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
-            if not m:
+            # Do not mistake a Windows path "c:\foo\bar" for a connection string
+            if os.path.isdir(root) or not m:
                 conntype = "local"
             else:
                 conntype = "rsh"
@@ -170,13 +200,16 @@
 
         if conntype != "pserver":
             if conntype == "rsh":
-                rsh = os.environ.get("CVS_RSH" or "rsh")
+                rsh = os.environ.get("CVS_RSH") or "ssh"
                 if user:
                     cmd = [rsh, '-l', user, host] + cmd
                 else:
                     cmd = [rsh, host] + cmd
 
-            self.writep, self.readp = os.popen2(cmd)
+            # popen2 does not support argument lists under Windows
+            cmd = [util.shellquote(arg) for arg in cmd]
+            cmd = util.quotecommand(' '.join(cmd))
+            self.writep, self.readp = os.popen2(cmd, 'b')
 
         self.realroot = root
 
@@ -198,11 +231,25 @@
         return self.heads
 
     def _getfile(self, name, rev):
+
+        def chunkedread(fp, count):
+            # file objects returned by socket.makefile() do not handle
+            # large read() requests very well.
+            chunksize = 65536
+            output = StringIO()
+            while count > 0:
+                data = fp.read(min(count, chunksize))
+                if not data:
+                    raise util.Abort("%d bytes missing from remote file" % count)
+                count -= len(data)
+                output.write(data)
+            return output.getvalue()
+
         if rev.endswith("(DEAD)"):
             raise IOError
 
         args = ("-N -P -kk -r %s --" % rev).split()
-        args.append(os.path.join(self.cvsrepo, name))
+        args.append(self.cvsrepo + '/' + name)
         for x in args:
             self.writep.write("Argument %s\n" % x)
         self.writep.write("Directory .\n%s\nco\n" % self.realroot)
@@ -216,14 +263,14 @@
                 self.readp.readline() # entries
                 mode = self.readp.readline()[:-1]
                 count = int(self.readp.readline()[:-1])
-                data = self.readp.read(count)
+                data = chunkedread(self.readp, count)
             elif line.startswith(" "):
                 data += line[1:]
             elif line.startswith("M "):
                 pass
             elif line.startswith("Mbinary "):
                 count = int(self.readp.readline()[:-1])
-                data = self.readp.read(count)
+                data = chunkedread(self.readp, count)
             else:
                 if line == "ok\n":
                     return (data, "x" in mode and "x" or "")
@@ -250,10 +297,16 @@
         files = self.files[rev]
         cl = files.items()
         cl.sort()
-        return cl
+        return (cl, {})
 
     def getcommit(self, rev):
         return self.changeset[rev]
 
     def gettags(self):
         return self.tags
+
+    def getchangedfiles(self, rev, i):
+        files = self.files[rev].keys()
+        files.sort()
+        return files
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/convert/darcs.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,127 @@
+# darcs support for the convert extension
+
+from common import NoRepo, checktool, commandline, commit, converter_source
+from mercurial.i18n import _
+from mercurial import util
+import os, shutil, tempfile
+
+# The naming drift of ElementTree is fun!
+
+try: from xml.etree.cElementTree import ElementTree
+except ImportError:
+    try: from xml.etree.ElementTree import ElementTree
+    except ImportError:
+        try: from elementtree.cElementTree import ElementTree
+        except ImportError:
+            try: from elementtree.ElementTree import ElementTree
+            except ImportError: ElementTree = None
+
+
+class darcs_source(converter_source, commandline):
+    def __init__(self, ui, path, rev=None):
+        converter_source.__init__(self, ui, path, rev=rev)
+        commandline.__init__(self, ui, 'darcs')
+
+        # check for _darcs, ElementTree, _darcs/inventory so that we can
+        # easily skip test-convert-darcs if ElementTree is not around
+        if not os.path.exists(os.path.join(path, '_darcs')):
+            raise NoRepo("%s does not look like a darcs repo" % path)
+
+        checktool('darcs')
+
+        if ElementTree is None:
+            raise util.Abort(_("Python ElementTree module is not available"))
+
+        if not os.path.exists(os.path.join(path, '_darcs', 'inventory')):
+            raise NoRepo("%s does not look like a darcs repo" % path)
+
+        self.path = os.path.realpath(path)
+
+        self.lastrev = None
+        self.changes = {}
+        self.parents = {}
+        self.tags = {}
+
+    def before(self):
+        self.tmppath = tempfile.mkdtemp(
+            prefix='convert-' + os.path.basename(self.path) + '-')
+        output, status = self.run('init', repodir=self.tmppath)
+        self.checkexit(status)
+
+        tree = self.xml('changes', xml_output=True, summary=True,
+                        repodir=self.path)
+        tagname = None
+        child = None
+        for elt in tree.findall('patch'):
+            node = elt.get('hash')
+            name = elt.findtext('name', '')
+            if name.startswith('TAG '):
+                tagname = name[4:].strip()
+            elif tagname is not None:
+                self.tags[tagname] = node
+                tagname = None
+            self.changes[node] = elt
+            self.parents[child] = [node]
+            child = node
+        self.parents[child] = []
+
+    def after(self):
+        self.ui.debug('cleaning up %s\n' % self.tmppath)
+        shutil.rmtree(self.tmppath, ignore_errors=True)
+
+    def xml(self, cmd, **kwargs):
+        etree = ElementTree()
+        fp = self._run(cmd, **kwargs)
+        etree.parse(fp)
+        self.checkexit(fp.close())
+        return etree.getroot()
+
+    def getheads(self):
+        return self.parents[None]
+
+    def getcommit(self, rev):
+        elt = self.changes[rev]
+        date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y')
+        desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
+        return commit(author=elt.get('author'), date=util.datestr(date),
+                      desc=desc.strip(), parents=self.parents[rev])
+
+    def pull(self, rev):
+        output, status = self.run('pull', self.path, all=True,
+                                  match='hash %s' % rev,
+                                  no_test=True, no_posthook=True,
+                                  external_merge='/bin/false',
+                                  repodir=self.tmppath)
+        if status:
+            if output.find('We have conflicts in') == -1:
+                self.checkexit(status, output)
+            output, status = self.run('revert', all=True, repodir=self.tmppath)
+            self.checkexit(status, output)
+
+    def getchanges(self, rev):
+        self.pull(rev)
+        copies = {}
+        changes = []
+        for elt in self.changes[rev].find('summary').getchildren():
+            if elt.tag in ('add_directory', 'remove_directory'):
+                continue
+            if elt.tag == 'move':
+                changes.append((elt.get('from'), rev))
+                copies[elt.get('from')] = elt.get('to')
+            else:
+                changes.append((elt.text.strip(), rev))
+        changes.sort()
+        self.lastrev = rev
+        return changes, copies
+
+    def getfile(self, name, rev):
+        if rev != self.lastrev:
+            raise util.Abort(_('internal calling inconsistency'))
+        return open(os.path.join(self.tmppath, name), 'rb').read()
+
+    def getmode(self, name, rev):
+        mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
+        return (mode & 0111) and 'x' or ''
+
+    def gettags(self):
+        return self.tags
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/convert/filemap.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,353 @@
+# Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
+# Copyright 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
+#
+# This software may be used and distributed according to the terms of
+# the GNU General Public License, incorporated herein by reference.
+
+import shlex
+from mercurial.i18n import _
+from mercurial import util
+from common import SKIPREV, converter_source
+
+def rpairs(name):
+    yield '.', name
+    e = len(name)
+    while e != -1:
+        yield name[:e], name[e+1:]
+        e = name.rfind('/', 0, e)
+
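rpairs drives the longest-prefix lookup used below: it first yields the whole name under the special '.' prefix, then progressively shorter directory prefixes with the matching suffix. Run against the rpairs defined above, with a purely illustrative path:

    # Expected pairs for a three-component path:
    #   ('.', 'foo/bar/baz')
    #   ('foo/bar/baz', '')
    #   ('foo/bar', 'baz')
    #   ('foo', 'bar/baz')
    for pre, suf in rpairs('foo/bar/baz'):
        print (pre, suf)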
+class filemapper(object):
+    '''Map and filter filenames when importing.
+    A name can be mapped to itself, a new name, or None (omit from new
+    repository).'''
+
+    def __init__(self, ui, path=None):
+        self.ui = ui
+        self.include = {}
+        self.exclude = {}
+        self.rename = {}
+        if path:
+            if self.parse(path):
+                raise util.Abort(_('errors in filemap'))
+
+    def parse(self, path):
+        errs = 0
+        def check(name, mapping, listname):
+            if name in mapping:
+                self.ui.warn(_('%s:%d: %r already in %s list\n') %
+                             (lex.infile, lex.lineno, name, listname))
+                return 1
+            return 0
+        lex = shlex.shlex(open(path), path, True)
+        lex.wordchars += '!@#$%^&*()-=+[]{}|;:,./<>?'
+        cmd = lex.get_token()
+        while cmd:
+            if cmd == 'include':
+                name = lex.get_token()
+                errs += check(name, self.exclude, 'exclude')
+                self.include[name] = name
+            elif cmd == 'exclude':
+                name = lex.get_token()
+                errs += check(name, self.include, 'include')
+                errs += check(name, self.rename, 'rename')
+                self.exclude[name] = name
+            elif cmd == 'rename':
+                src = lex.get_token()
+                dest = lex.get_token()
+                errs += check(src, self.exclude, 'exclude')
+                self.rename[src] = dest
+            elif cmd == 'source':
+                errs += self.parse(lex.get_token())
+            else:
+                self.ui.warn(_('%s:%d: unknown directive %r\n') %
+                             (lex.infile, lex.lineno, cmd))
+                errs += 1
+            cmd = lex.get_token()
+        return errs
+
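parse recognizes four directives: include, exclude, rename and source (which pulls in another filemap file). A hedged sketch exercising the filemapper defined here on an invented filemap; 'ui' stands for the Mercurial ui object used elsewhere in this extension, and all paths are made up:

    # Invented filemap and paths, shown only to illustrate the directives.
    open('example.fmap', 'w').write('include vendor/lib\n'
                                    'exclude vendor/lib/tests\n'
                                    'rename vendor/lib src/lib\n')
    fm = filemapper(ui, 'example.fmap')
    print fm('vendor/lib/util.py')      # 'src/lib/util.py' (renamed)
    print fm('vendor/lib/tests/t1.py')  # None (excluded)
    print fm('README')                  # None (not included)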
+    def lookup(self, name, mapping):
+        for pre, suf in rpairs(name):
+            try:
+                return mapping[pre], pre, suf
+            except KeyError, err:
+                pass
+        return '', name, ''
+
+    def __call__(self, name):
+        if self.include:
+            inc = self.lookup(name, self.include)[0]
+        else:
+            inc = name
+        if self.exclude:
+            exc = self.lookup(name, self.exclude)[0]
+        else:
+            exc = ''
+        if not inc or exc:
+            return None
+        newpre, pre, suf = self.lookup(name, self.rename)
+        if newpre:
+            if newpre == '.':
+                return suf
+            if suf:
+                return newpre + '/' + suf
+            return newpre
+        return name
+
+    def active(self):
+        return bool(self.include or self.exclude or self.rename)
+
+# This class does two additional things compared to a regular source:
+#
+# - Filter and rename files.  This is mostly wrapped by the filemapper
+#   class above. We hide the original filename in the revision that is
+#   returned by getchanges to be able to find things later in getfile
+#   and getmode.
+#
+# - Return only revisions that matter for the files we're interested in.
+#   This involves rewriting the parents of the original revision to
+#   create a graph that is restricted to those revisions.
+#
+#   This set of revisions includes not only revisions that directly
+#   touch files we're interested in, but also merges that merge two
+#   or more interesting revisions.
+
+class filemap_source(converter_source):
+    def __init__(self, ui, baseconverter, filemap):
+        super(filemap_source, self).__init__(ui)
+        self.base = baseconverter
+        self.filemapper = filemapper(ui, filemap)
+        self.commits = {}
+        # if a revision rev has parent p in the original revision graph, then
+        # rev will have parent self.parentmap[p] in the restricted graph.
+        self.parentmap = {}
+        # self.wantedancestors[rev] is the set of all ancestors of rev that
+        # are in the restricted graph.
+        self.wantedancestors = {}
+        self.convertedorder = None
+        self._rebuilt = False
+        self.origparents = {}
+        self.children = {}
+        self.seenchildren = {}
+
+    def before(self):
+        self.base.before()
+
+    def after(self):
+        self.base.after()
+
+    def setrevmap(self, revmap):
+        # rebuild our state to make things restartable
+        #
+        # To avoid calling getcommit for every revision that has already
+        # been converted, we rebuild only the parentmap, delaying the
+        # rebuild of wantedancestors until we need it (i.e. until a
+        # merge).
+        #
+        # We assume the order argument lists the revisions in
+        # topological order, so that we can infer which revisions were
+        # wanted by previous runs.
+        self._rebuilt = not revmap
+        seen = {SKIPREV: SKIPREV}
+        dummyset = util.set()
+        converted = []
+        for rev in revmap.order:
+            mapped = revmap[rev]
+            wanted = mapped not in seen
+            if wanted:
+                seen[mapped] = rev
+                self.parentmap[rev] = rev
+            else:
+                self.parentmap[rev] = seen[mapped]
+            self.wantedancestors[rev] = dummyset
+            arg = seen[mapped]
+            if arg == SKIPREV:
+                arg = None
+            converted.append((rev, wanted, arg))
+        self.convertedorder = converted
+        return self.base.setrevmap(revmap)
+
+    def rebuild(self):
+        if self._rebuilt:
+            return True
+        self._rebuilt = True
+        self.parentmap.clear()
+        self.wantedancestors.clear()
+        self.seenchildren.clear()
+        for rev, wanted, arg in self.convertedorder:
+            if rev not in self.origparents:
+                self.origparents[rev] = self.getcommit(rev).parents
+            if arg is not None:
+                self.children[arg] = self.children.get(arg, 0) + 1
+
+        for rev, wanted, arg in self.convertedorder:
+            parents = self.origparents[rev]
+            if wanted:
+                self.mark_wanted(rev, parents)
+            else:
+                self.mark_not_wanted(rev, arg)
+            self._discard(arg, *parents)
+
+        return True
+
+    def getheads(self):
+        return self.base.getheads()
+
+    def getcommit(self, rev):
+        # We want to save a reference to the commit objects to be able
+        # to rewrite their parents later on.
+        c = self.commits[rev] = self.base.getcommit(rev)
+        for p in c.parents:
+            self.children[p] = self.children.get(p, 0) + 1
+        return c
+
+    def _discard(self, *revs):
+        for r in revs:
+            if r is None:
+                continue
+            self.seenchildren[r] = self.seenchildren.get(r, 0) + 1
+            if self.seenchildren[r] == self.children[r]:
+                del self.wantedancestors[r]
+                del self.parentmap[r]
+                del self.seenchildren[r]
+                if self._rebuilt:
+                    del self.children[r]
+
+    def wanted(self, rev, i):
+        # Return True if we're directly interested in rev.
+        #
+        # i is an index selecting one of the parents of rev (if rev
+        # has no parents, i is None).  getchangedfiles will give us
+        # the list of files that are different in rev and in the parent
+        # indicated by i.  If we're interested in any of these files,
+        # we're interested in rev.
+        try:
+            files = self.base.getchangedfiles(rev, i)
+        except NotImplementedError:
+            raise util.Abort(_("source repository doesn't support --filemap"))
+        for f in files:
+            if self.filemapper(f):
+                return True
+        return False
+
+    def mark_not_wanted(self, rev, p):
+        # Mark rev as not interesting and update data structures.
+
+        if p is None:
+            # A root revision. Use SKIPREV to indicate that it doesn't
+            # map to any revision in the restricted graph.  Put SKIPREV
+            # in the set of wanted ancestors to simplify code elsewhere
+            self.parentmap[rev] = SKIPREV
+            self.wantedancestors[rev] = util.set((SKIPREV,))
+            return
+
+        # Reuse the data from our parent.
+        self.parentmap[rev] = self.parentmap[p]
+        self.wantedancestors[rev] = self.wantedancestors[p]
+
+    def mark_wanted(self, rev, parents):
+        # Mark rev as wanted and update data structures.
+
+        # rev will be in the restricted graph, so children of rev in
+        # the original graph should still have rev as a parent in the
+        # restricted graph.
+        self.parentmap[rev] = rev
+
+        # The set of wanted ancestors of rev is the union of the sets
+        # of wanted ancestors of its parents. Plus rev itself.
+        wrev = util.set()
+        for p in parents:
+            wrev.update(self.wantedancestors[p])
+        wrev.add(rev)
+        self.wantedancestors[rev] = wrev
+
+    def getchanges(self, rev):
+        parents = self.commits[rev].parents
+        if len(parents) > 1:
+            self.rebuild()
+
+        # To decide whether we're interested in rev we:
+        #
+        # - calculate what parents rev will have if it turns out we're
+        #   interested in it.  If it's going to have more than 1 parent,
+        #   we're interested in it.
+        #
+        # - otherwise, we'll compare it with the single parent we found.
+        #   If any of the files we're interested in is different in the
+        #   two revisions, we're interested in rev.
+
+        # A parent p is interesting if its mapped version (self.parentmap[p]):
+        # - is not SKIPREV
+        # - is still not in the list of parents (we don't want duplicates)
+        # - is not an ancestor of the mapped versions of the other parents
+        mparents = []
+        wp = None
+        for i, p1 in enumerate(parents):
+            mp1 = self.parentmap[p1]
+            if mp1 == SKIPREV or mp1 in mparents:
+                continue
+            for p2 in parents:
+                if p1 == p2 or mp1 == self.parentmap[p2]:
+                    continue
+                if mp1 in self.wantedancestors[p2]:
+                    break
+            else:
+                mparents.append(mp1)
+                wp = i
+
+        if wp is None and parents:
+            wp = 0
+
+        self.origparents[rev] = parents
+
+        if len(mparents) < 2 and not self.wanted(rev, wp):
+            # We don't want this revision.
+            # Update our state and tell the convert process to map this
+            # revision to the same revision its parent was mapped to.
+            p = None
+            if parents:
+                p = parents[wp]
+            self.mark_not_wanted(rev, p)
+            self.convertedorder.append((rev, False, p))
+            self._discard(*parents)
+            return self.parentmap[rev]
+
+        # We want this revision.
+        # Rewrite the parents of the commit object
+        self.commits[rev].parents = mparents
+        self.mark_wanted(rev, parents)
+        self.convertedorder.append((rev, True, None))
+        self._discard(*parents)
+
+        # Get the real changes and do the filtering/mapping.
+        # To be able to get the files later on in getfile and getmode,
+        # we hide the original filename in the rev part of the return
+        # value.
+        changes, copies = self.base.getchanges(rev)
+        newnames = {}
+        files = []
+        for f, r in changes:
+            newf = self.filemapper(f)
+            if newf:
+                files.append((newf, (f, r)))
+                newnames[f] = newf
+
+        ncopies = {}
+        for c in copies:
+            newc = self.filemapper(c)
+            if newc:
+                newsource = self.filemapper(copies[c])
+                if newsource:
+                    ncopies[newc] = newsource
+
+        return files, ncopies
+
+    def getfile(self, name, rev):
+        realname, realrev = rev
+        return self.base.getfile(realname, realrev)
+
+    def getmode(self, name, rev):
+        realname, realrev = rev
+        return self.base.getmode(realname, realrev)
+
+    def gettags(self):
+        return self.base.gettags()
--- a/hgext/convert/git.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/convert/git.py	Wed Feb 06 19:57:52 2008 -0800
@@ -1,12 +1,28 @@
 # git support for the convert extension
 
 import os
+from mercurial import util
 
-from common import NoRepo, commit, converter_source
+from common import NoRepo, commit, converter_source, checktool
 
 class convert_git(converter_source):
-    def gitcmd(self, s):
-        return os.popen('GIT_DIR=%s %s' % (self.path, s))
+    # Windows does not support the 'GIT_DIR=... command' construct, while
+    # some other systems cannot remove an environment variable once it is
+    # set. Just assume no platform has both problems.
+    if hasattr(os, 'unsetenv'):
+        def gitcmd(self, s):
+            prevgitdir = os.environ.get('GIT_DIR')
+            os.environ['GIT_DIR'] = self.path
+            try:
+                return util.popen(s)
+            finally:
+                if prevgitdir is None:
+                    del os.environ['GIT_DIR']
+                else:
+                    os.environ['GIT_DIR'] = prevgitdir
+    else:
+        def gitcmd(self, s):
+            return util.popen('GIT_DIR=%s %s' % (self.path, s))
 
     def __init__(self, ui, path, rev=None):
         super(convert_git, self).__init__(ui, path, rev=rev)
@@ -14,7 +30,10 @@
         if os.path.isdir(path + "/.git"):
             path += "/.git"
         if not os.path.exists(path + "/objects"):
-            raise NoRepo("couldn't open GIT repo %s" % path)
+            raise NoRepo("%s does not look like a Git repo" % path)
+
+        checktool('git-rev-parse', 'git')
+
         self.path = path
 
     def getheads(self):
@@ -26,7 +45,7 @@
 
     def catfile(self, rev, type):
         if rev == "0" * 40: raise IOError()
-        fh = self.gitcmd("git-cat-file %s %s 2>/dev/null" % (type, rev))
+        fh = self.gitcmd("git-cat-file %s %s" % (type, rev))
         return fh.read()
 
     def getfile(self, name, rev):
@@ -39,16 +58,21 @@
         self.modecache = {}
         fh = self.gitcmd("git-diff-tree --root -m -r %s" % version)
         changes = []
+        seen = {}
         for l in fh:
-            if "\t" not in l: continue
+            if "\t" not in l:
+                continue
             m, f = l[:-1].split("\t")
+            if f in seen:
+                continue
+            seen[f] = 1
             m = m.split()
             h = m[3]
             p = (m[1] == "100755")
             s = (m[1] == "120000")
             self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
             changes.append((f, h))
-        return changes
+        return (changes, {})
 
     def getcommit(self, version):
         c = self.catfile(version, "commit") # read the commit hash
@@ -78,7 +102,6 @@
         tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
         tz = -int(tzs) * (int(tzh) * 3600 + int(tzm))
         date = tm + " " + str(tz)
-        author = author or "unknown"
 
         c = commit(parents=parents, date=date, author=author, desc=message,
                    rev=version)
@@ -86,7 +109,7 @@
 
     def gettags(self):
         tags = {}
-        fh = self.gitcmd('git-ls-remote --tags "%s" 2>/dev/null' % self.path)
+        fh = self.gitcmd('git-ls-remote --tags "%s"' % self.path)
         prefix = 'refs/tags/'
         for line in fh:
             line = line.strip()
@@ -99,3 +122,21 @@
             tags[tag] = node
 
         return tags
+
+    def getchangedfiles(self, version, i):
+        changes = []
+        if i is None:
+            fh = self.gitcmd("git-diff-tree --root -m -r %s" % version)
+            for l in fh:
+                if "\t" not in l:
+                    continue
+                m, f = l[:-1].split("\t")
+                changes.append(f)
+            fh.close()
+        else:
+            fh = self.gitcmd('git-diff-tree --name-only --root -r %s "%s^%s" --'
+                             % (version, version, i+1))
+            changes = [f.rstrip('\n') for f in fh]
+            fh.close()
+
+        return changes
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/convert/gnuarch.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,267 @@
+# GNU Arch support for the convert extension
+
+from common import NoRepo, checktool, commandline, commit, converter_source
+from mercurial.i18n import _
+from mercurial import util
+import os, shutil, tempfile, stat
+
+class gnuarch_source(converter_source, commandline):
+
+    class gnuarch_rev:
+        def __init__(self, rev):
+            self.rev = rev
+            self.summary = ''
+            self.date = None
+            self.author = ''
+            self.add_files = []
+            self.mod_files = []
+            self.del_files = []
+            self.ren_files = {}
+            self.ren_dirs = {}
+
+    def __init__(self, ui, path, rev=None):
+        super(gnuarch_source, self).__init__(ui, path, rev=rev)
+
+        if not os.path.exists(os.path.join(path, '{arch}')):
+            raise NoRepo(_("couldn't open GNU Arch repo %s" % path))
+
+        # Could use checktool, but we want to check for baz or tla.
+        self.execmd = None
+        if util.find_exe('tla'):
+            self.execmd = 'tla'
+        else:
+            if util.find_exe('baz'):
+                self.execmd = 'baz'
+            else:
+                raise util.Abort(_('cannot find a GNU Arch tool'))
+
+        commandline.__init__(self, ui, self.execmd)
+
+        self.path = os.path.realpath(path)
+        self.tmppath = None
+
+        self.treeversion = None
+        self.lastrev = None
+        self.changes = {}
+        self.parents = {}
+        self.tags = {}
+        self.modecache = {}
+
+    def before(self):
+        if self.execmd == 'tla':
+            output = self.run0('tree-version', self.path)
+        else:
+            output = self.run0('tree-version', '-d', self.path)
+        self.treeversion = output.strip()
+
+        self.ui.status(_('analyzing tree version %s...\n' % self.treeversion))
+
+        # Get name of temporary directory
+        version = self.treeversion.split('/')
+        self.tmppath = os.path.join(tempfile.gettempdir(),
+                                    'hg-%s' % version[1])
+
+        # Generate parents dictionary
+        child = []
+        output, status = self.runlines('revisions', self.treeversion)
+        self.checkexit(status, 'archive registered?')
+        for l in output:
+            rev = l.strip()
+            self.changes[rev] = self.gnuarch_rev(rev)
+
+            # Read author, date and summary
+            catlog = self.runlines0('cat-log', '-d', self.path, rev)
+            self._parsecatlog(catlog, rev)
+
+            self.parents[rev] = child
+            child = [rev]
+            if rev == self.rev:
+                break
+        self.parents[None] = child
+
+    def after(self):
+        self.ui.debug(_('cleaning up %s\n' % self.tmppath))
+        shutil.rmtree(self.tmppath, ignore_errors=True)
+
+    def getheads(self):
+        return self.parents[None]
+
+    def getfile(self, name, rev):
+        if rev != self.lastrev:
+            raise util.Abort(_('internal calling inconsistency'))
+
+        # Raise IOError if necessary (i.e. deleted files).
+        if not os.path.exists(os.path.join(self.tmppath, name)):
+            raise IOError
+
+        data, mode = self._getfile(name, rev)
+        self.modecache[(name, rev)] = mode
+
+        return data
+
+    def getmode(self, name, rev):
+        return self.modecache[(name, rev)]
+
+    def getchanges(self, rev):
+        self.modecache = {}
+        self._update(rev)
+        changes = []
+        copies = {}
+
+        for f in self.changes[rev].add_files:
+            changes.append((f, rev))
+
+        for f in self.changes[rev].mod_files:
+            changes.append((f, rev))
+
+        for f in self.changes[rev].del_files:
+            changes.append((f, rev))
+
+        for src in self.changes[rev].ren_files:
+            to = self.changes[rev].ren_files[src]
+            changes.append((src, rev))
+            changes.append((to, rev))
+            copies[src] = to
+
+        for src in self.changes[rev].ren_dirs:
+            to = self.changes[rev].ren_dirs[src]
+            chgs, cps = self._rendirchanges(src, to)
+            changes += [(f, rev) for f in chgs]
+            for c in cps:
+                copies[c] = cps[c]
+
+        changes.sort()
+        self.lastrev = rev
+
+        return changes, copies
+
+    def getcommit(self, rev):
+        changes = self.changes[rev]
+        return commit(author = changes.author, date = changes.date,
+                      desc = changes.summary, parents = self.parents[rev])
+
+    def gettags(self):
+        return self.tags
+
+    def _execute(self, cmd, *args, **kwargs):
+        cmdline = [self.execmd, cmd]
+        cmdline += args
+        cmdline = [util.shellquote(arg) for arg in cmdline]
+        cmdline += ['>', util.nulldev, '2>', util.nulldev]
+        cmdline = util.quotecommand(' '.join(cmdline))
+        self.ui.debug(cmdline, '\n')
+        return os.system(cmdline)
+
+    def _update(self, rev):
+        if rev == 'base-0':
+            # Initialise 'base-0' revision
+            self.ui.debug(_('obtaining revision %s...\n' % rev))
+            revision = '%s--%s' % (self.treeversion, rev)
+            output = self._execute('get', revision, self.tmppath)
+            self.ui.debug(_('analysing revision %s...\n' % rev))
+            files = self._readcontents(self.tmppath)
+            self.changes[rev].add_files += files
+        else:
+            self.ui.debug(_('applying revision %s...\n' % rev))
+            revision = '%s--%s' % (self.treeversion, rev)
+            output = self._execute('replay', '-d', self.tmppath, revision)
+
+            old_rev = self.parents[rev][0]
+            self.ui.debug(_('computing changeset between %s and %s...\n' \
+                               % (old_rev, rev)))
+            rev_a = '%s--%s' % (self.treeversion, old_rev)
+            rev_b = '%s--%s' % (self.treeversion, rev)
+            delta = self.runlines0('delta', '-n', rev_a, rev_b)
+            self._parsedelta(delta, rev)
+
+    def _getfile(self, name, rev):
+        mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
+        if stat.S_ISLNK(mode):
+            data = os.readlink(os.path.join(self.tmppath, name))
+            mode = mode and 'l' or ''
+        else:
+            data = open(os.path.join(self.tmppath, name), 'rb').read()
+            mode = (mode & 0111) and 'x' or ''
+        return data, mode
+
+    def _exclude(self, name):
+        exclude = [ '{arch}', '.arch-ids', '.arch-inventory' ]
+        for exc in exclude:
+            if name.find(exc) != -1:
+                return True
+        return False
+
+    def _readcontents(self, path):
+        files = []
+        contents = os.listdir(path)
+        while len(contents) > 0:
+            c = contents.pop()
+            p = os.path.join(path, c)
+            if not self._exclude(p):
+                if os.path.isdir(p):
+                    contents += [os.path.join(c, f) for f in os.listdir(p)]
+                else:
+                    files.append(c)
+        return files
+
+    def _rendirchanges(self, src, dest):
+        changes = []
+        copies = {}
+        files = self._readcontents(os.path.join(self.tmppath, dest))
+        for f in files:
+            s = os.path.join(src, f)
+            d = os.path.join(dest, f)
+            changes.append(s)
+            changes.append(d)
+            copies[s] = d
+        return changes, copies
+
+    def _parsecatlog(self, data, rev):
+        readingsummary = False
+        for l in data:
+            l = l.strip()
+            if l.startswith('Standard-date:'):
+                date = l[len('Standard-date: '):]
+                strdate = util.strdate(date, '%Y-%m-%d %H:%M:%S')
+                self.changes[rev].date = util.datestr(strdate)
+
+            if l.startswith('Creator:'):
+                self.changes[rev].author = l[len('Creator: '):]
+
+            if not readingsummary and l.startswith('Summary:'):
+                readingsummary = True
+                self.changes[rev].summary = l[len('Summary: '):]
+            elif not l.startswith('Keywords:'):
+                self.changes[rev].summary += '\n%s' % l
+
+    def _parsedelta(self, data, rev):
+        for l in data:
+            l = l.strip()
+            if l.startswith('A') and not l.startswith('A/'):
+                file = l[1:].strip()
+                if not self._exclude(file):
+                    self.changes[rev].add_files.append(file)
+            elif l.startswith('/>'):
+                dirs = l[2:].strip().split(' ')
+                if len(dirs) == 1:
+                    dirs = l[2:].strip().split('\t')
+                if not self._exclude(dirs[0]) and not self._exclude(dirs[1]):
+                    self.changes[rev].ren_dirs[dirs[0]] = dirs[1]
+            elif l.startswith('M'):
+                file = l[1:].strip()
+                if not self._exclude(file):
+                    self.changes[rev].mod_files.append(file)
+            elif l.startswith('->'):
+                file = l[2:].strip()
+                if not self._exclude(file):
+                    self.changes[rev].mod_files.append(file)
+            elif l.startswith('D') and not l.startswith('D/'):
+                file = l[1:].strip()
+                if not self._exclude(file):
+                    self.changes[rev].del_files.append(file)
+            elif l.startswith('=>'):
+                files = l[2:].strip().split(' ')
+                if len(files) == 1:
+                    files = l[2:].strip().split('\t')
+                if not self._exclude(files[0]) and not self._exclude(files[1]):
+                    self.changes[rev].ren_files[files[0]] = files[1]
--- a/hgext/convert/hg.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/convert/hg.py	Wed Feb 06 19:57:52 2008 -0800
@@ -1,20 +1,67 @@
 # hg backend for convert extension
 
+# Notes for hg->hg conversion:
+#
+# * Old versions of Mercurial didn't trim the whitespace from the ends
+#   of commit messages, but new versions do.  Changesets created by
+#   those older versions, then converted, may thus have different
+#   hashes for changesets that are otherwise identical.
+#
+# * By default, the source revision is stored in the converted
+#   revision.  This will cause the converted revision to have a
+#   different identity than the source.  To avoid this, use the
+#   following option: "--config convert.hg.saverev=false"
+
+
 import os, time
-from mercurial import hg
+from mercurial.i18n import _
+from mercurial.node import *
+from mercurial import hg, lock, revlog, util
 
-from common import NoRepo, converter_sink
+from common import NoRepo, commit, converter_source, converter_sink
 
-class convert_mercurial(converter_sink):
+class mercurial_sink(converter_sink):
     def __init__(self, ui, path):
-        self.path = path
-        self.ui = ui
-        try:
-            self.repo = hg.repository(self.ui, path)
-        except:
-            raise NoRepo("could open hg repo %s" % path)
+        converter_sink.__init__(self, ui, path)
+        self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
+        self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
+        self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
+        self.lastbranch = None
+        if os.path.isdir(path) and len(os.listdir(path)) > 0:
+            try:
+                self.repo = hg.repository(self.ui, path)
+                if not self.repo.local():
+                    raise NoRepo(_('%s is not a local Mercurial repo') % path)
+            except hg.RepoError, err:
+                ui.print_exc()
+                raise NoRepo(err.args[0])
+        else:
+            try:
+                ui.status(_('initializing destination %s repository\n') % path)
+                self.repo = hg.repository(self.ui, path, create=True)
+                if not self.repo.local():
+                    raise NoRepo(_('%s is not a local Mercurial repo') % path)
+                self.created.append(path)
+            except hg.RepoError, err:
+                ui.print_exc()
+                raise NoRepo("could not create hg repo %s as sink" % path)
+        self.lock = None
+        self.wlock = None
+        self.filemapmode = False
 
-    def mapfile(self):
+    def before(self):
+        self.ui.debug(_('run hg sink pre-conversion action\n'))
+        self.wlock = self.repo.wlock()
+        self.lock = self.repo.lock()
+        self.repo.dirstate.clear()
+
+    def after(self):
+        self.ui.debug(_('run hg sink post-conversion action\n'))
+        self.repo.dirstate.invalidate()
+        self.lock = None
+        self.wlock = None
+
+    def revmapfile(self):
         return os.path.join(self.path, ".hg", "shamap")
 
     def authorfile(self):
@@ -22,23 +69,61 @@
 
     def getheads(self):
         h = self.repo.changelog.heads()
-        return [ hg.hex(x) for x in h ]
+        return [ hex(x) for x in h ]
 
     def putfile(self, f, e, data):
         self.repo.wwrite(f, data, e)
         if f not in self.repo.dirstate:
-            self.repo.dirstate.add(f)
+            self.repo.dirstate.normallookup(f)
 
     def copyfile(self, source, dest):
         self.repo.copy(source, dest)
 
     def delfile(self, f):
         try:
-            os.unlink(self.repo.wjoin(f))
+            util.unlink(self.repo.wjoin(f))
             #self.repo.remove([f])
-        except:
+        except OSError:
             pass
 
+    def setbranch(self, branch, pbranches):
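+        # with convert.hg.clonebranches enabled, each branch is committed to
+        # its own repository under the destination path; switch to (or
+        # create) the repository matching this commit's branch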
+        if not self.clonebranches:
+            return
+
+        setbranch = (branch != self.lastbranch)
+        self.lastbranch = branch
+        if not branch:
+            branch = 'default'
+        pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
+        pbranch = pbranches and pbranches[0][1] or 'default'
+
+        branchpath = os.path.join(self.path, branch)
+        if setbranch:
+            self.after()
+            try:
+                self.repo = hg.repository(self.ui, branchpath)
+            except:
+                self.repo = hg.repository(self.ui, branchpath, create=True)
+            self.before()
+
+        # pbranches may bring revisions from other branches (merge parents)
+        # Make sure we have them, or pull them.
+        missings = {}
+        for b in pbranches:
+            try:
+                self.repo.lookup(b[0])
+            except:
+                missings.setdefault(b[1], []).append(b[0])
+        
+        if missings:
+            self.after()
+            for pbranch, heads in missings.iteritems():
+                pbranchpath = os.path.join(self.path, pbranch)
+                prepo = hg.repository(self.ui, pbranchpath)
+                self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
+                self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
+            self.before()
+
     def putcommit(self, files, parents, commit):
         seen = {}
         pl = []
@@ -47,14 +132,18 @@
                 pl.append(p)
                 seen[p] = 1
         parents = pl
+        nparents = len(parents)
+        if self.filemapmode and nparents == 1:
+            m1node = self.repo.changelog.read(bin(parents[0]))[0]
+            parent = parents[0]
 
         if len(parents) < 2: parents.append("0" * 40)
         if len(parents) < 2: parents.append("0" * 40)
         p2 = parents.pop(0)
 
         text = commit.desc
-        extra = {}
-        if commit.branch:
+        extra = commit.extra.copy()
+        if self.branchnames and commit.branch:
             extra['branch'] = commit.branch
         if commit.rev:
             extra['convert_revision'] = commit.rev
@@ -63,10 +152,18 @@
             p1 = p2
             p2 = parents.pop(0)
             a = self.repo.rawcommit(files, text, commit.author, commit.date,
-                                    hg.bin(p1), hg.bin(p2), extra=extra)
+                                    bin(p1), bin(p2), extra=extra)
+            self.repo.dirstate.clear()
             text = "(octopus merge fixup)\n"
             p2 = hg.hex(self.repo.changelog.tip())
 
+        if self.filemapmode and nparents == 1:
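+            # in filemap mode with a single parent, a commit whose manifest
+            # is identical to that parent's is redundant: roll it back and
+            # hand the parent revision back to the caller instead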
+            man = self.repo.manifest
+            mnode = self.repo.changelog.read(bin(p2))[0]
+            if not man.cmp(m1node, man.revision(mnode)):
+                self.repo.rollback()
+                self.repo.dirstate.clear()
+                return parent
         return p2
 
     def puttags(self, tags):
@@ -92,6 +189,113 @@
             f.close()
             if not oldlines: self.repo.add([".hgtags"])
             date = "%s 0" % int(time.mktime(time.gmtime()))
+            extra = {}
+            if self.tagsbranch != 'default':
+                extra['branch'] = self.tagsbranch
+            try:
+                tagparent = self.repo.changectx(self.tagsbranch).node()
+            except hg.RepoError, inst:
+                tagparent = nullid
             self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
-                                date, self.repo.changelog.tip(), hg.nullid)
-            return hg.hex(self.repo.changelog.tip())
+                                date, tagparent, nullid, extra=extra)
+            return hex(self.repo.changelog.tip())
+
+    def setfilemapmode(self, active):
+        self.filemapmode = active
+
+class mercurial_source(converter_source):
+    def __init__(self, ui, path, rev=None):
+        converter_source.__init__(self, ui, path, rev)
+        self.saverev = ui.configbool('convert', 'hg.saverev', True)
+        try:
+            self.repo = hg.repository(self.ui, path)
+            # try to provoke an exception if this isn't really a hg
+            # repo, but some other bogus compatible-looking url
+            if not self.repo.local():
+                raise hg.RepoError()
+        except hg.RepoError:
+            ui.print_exc()
+            raise NoRepo("%s is not a local Mercurial repo" % path)
+        self.lastrev = None
+        self.lastctx = None
+        self._changescache = None
+        self.convertfp = None
+
+    def changectx(self, rev):
+        if self.lastrev != rev:
+            self.lastctx = self.repo.changectx(rev)
+            self.lastrev = rev
+        return self.lastctx
+
+    def getheads(self):
+        if self.rev:
+            return [hex(self.repo.changectx(self.rev).node())]
+        else:
+            return [hex(node) for node in self.repo.heads()]
+
+    def getfile(self, name, rev):
+        try:
+            return self.changectx(rev).filectx(name).data()
+        except revlog.LookupError, err:
+            raise IOError(err)
+
+    def getmode(self, name, rev):
+        m = self.changectx(rev).manifest()
+        return (m.execf(name) and 'x' or '') + (m.linkf(name) and 'l' or '')
+
+    def getchanges(self, rev):
+        ctx = self.changectx(rev)
+        if self._changescache and self._changescache[0] == rev:
+            m, a, r = self._changescache[1]
+        else:
+            m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
+        changes = [(name, rev) for name in m + a + r]
+        changes.sort()
+        return (changes, self.getcopies(ctx, m + a))
+
+    def getcopies(self, ctx, files):
+        copies = {}
+        for name in files:
+            try:
+                copies[name] = ctx.filectx(name).renamed()[0]
+            except TypeError:
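+                # filectx.renamed() returns None when the file was not
+                # copied; indexing None raises TypeError, so skip it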
+                pass
+        return copies
+
+    def getcommit(self, rev):
+        ctx = self.changectx(rev)
+        parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
+        if self.saverev:
+            crev = rev
+        else:
+            crev = None
+        return commit(author=ctx.user(), date=util.datestr(ctx.date()),
+                      desc=ctx.description(), rev=crev, parents=parents,
+                      branch=ctx.branch(), extra=ctx.extra())
+
+    def gettags(self):
+        tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
+        return dict([(name, hex(node)) for name, node in tags])
+
+    def getchangedfiles(self, rev, i):
+        ctx = self.changectx(rev)
+        i = i or 0
+        changes = self.repo.status(ctx.parents()[i].node(), ctx.node())[:3]
+
+        if i == 0:
+            self._changescache = (rev, changes)
+
+        return changes[0] + changes[1] + changes[2]
+
+    def converted(self, rev, destrev):
+        if self.convertfp is None:
+            self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
+                                  'a')
+        self.convertfp.write('%s %s\n' % (destrev, rev))
+        self.convertfp.flush()
+
+    def before(self):
+        self.ui.debug(_('run hg source pre-conversion action\n'))
+
+    def after(self):
+        self.ui.debug(_('run hg source post-conversion action\n'))
--- a/hgext/convert/subversion.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/convert/subversion.py	Wed Feb 06 19:57:52 2008 -0800
@@ -8,6 +8,8 @@
 #   Relative path to the trunk (default: "trunk")
 # convert.svn.branches
 #   Relative path to tree of branches (default: "branches")
+# convert.svn.tags
+#   Relative path to tree of tags (default: "tags")
 #
 # Set these in a hgrc, or on the command line as follows:
 #
@@ -15,8 +17,13 @@
 
 import locale
 import os
+import re
+import sys
 import cPickle as pickle
-from mercurial import util
+import tempfile
+
+from mercurial import strutil, util
+from mercurial.i18n import _
 
 # Subversion stuff. Works best with very recent Python SVN bindings
 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
@@ -24,19 +31,37 @@
 
 from cStringIO import StringIO
 
-from common import NoRepo, commit, converter_source
+from common import NoRepo, commit, converter_source, encodeargs, decodeargs
+from common import commandline, converter_sink, mapfile
 
 try:
     from svn.core import SubversionException, Pool
+    import svn
+    import svn.client
     import svn.core
     import svn.ra
     import svn.delta
-    import svn
     import transport
 except ImportError:
     pass
 
-class CompatibilityException(Exception): pass
+def geturl(path):
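+    """Best-effort conversion of path to a Subversion URL: working copies
+    and existing URLs go through url_from_path, other local directories
+    become file:// URLs, anything else is returned unchanged."""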
+    try:
+        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
+    except SubversionException:
+        pass
+    if os.path.isdir(path):
+        path = os.path.normpath(os.path.abspath(path))
+        if os.name == 'nt':
+            path = '/' + util.normpath(path)
+        return 'file://%s' % path
+    return path
+
+def optrev(number):
+    optrev = svn.core.svn_opt_revision_t()
+    optrev.kind = svn.core.svn_opt_revision_number
+    optrev.value.number = number
+    return optrev
 
 class changedpath(object):
     def __init__(self, p):
@@ -44,27 +69,102 @@
         self.copyfrom_rev = p.copyfrom_rev
         self.action = p.action
 
+def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
+                    strict_node_history=False):
+    protocol = -1
+    def receiver(orig_paths, revnum, author, date, message, pool):
+        if orig_paths is not None:
+            for k, v in orig_paths.iteritems():
+                orig_paths[k] = changedpath(v)
+        pickle.dump((orig_paths, revnum, author, date, message),
+                    fp, protocol)
+
+    try:
+        # Use an ra of our own so that our parent can consume
+        # our results without confusing the server.
+        t = transport.SvnRaTransport(url=url)
+        svn.ra.get_log(t.ra, paths, start, end, limit,
+                       discover_changed_paths,
+                       strict_node_history,
+                       receiver)
+    except SubversionException, (inst, num):
+        pickle.dump(num, fp, protocol)
+    except IOError:
+        # Caller may interrupt the iteration
+        pickle.dump(None, fp, protocol)
+    else:
+        pickle.dump(None, fp, protocol)
+    fp.close()
+
+def debugsvnlog(ui, **opts):
+    """Fetch SVN log in a subprocess and channel them back to parent to
+    avoid memory collection issues.
+    """
+    util.set_binary(sys.stdin)
+    util.set_binary(sys.stdout)
+    args = decodeargs(sys.stdin.read())
+    get_log_child(sys.stdout, *args)
+
+class logstream:
+    """Interruptible revision log iterator."""
+    def __init__(self, stdout):
+        self._stdout = stdout
+
+    def __iter__(self):
+        while True:
+            entry = pickle.load(self._stdout)
+            try:
+                orig_paths, revnum, author, date, message = entry
+            except:
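+                # a None entry marks the normal end of the log; any other
+                # non-tuple value is an error code forwarded by the child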
+                if entry is None:
+                    break
+                raise SubversionException("child raised exception", entry)
+            yield entry
+
+    def close(self):
+        if self._stdout:
+            self._stdout.close()
+            self._stdout = None
+
+def get_log(url, paths, start, end, limit=0, discover_changed_paths=True,
+                strict_node_history=False):
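+    """Spawn an 'hg debugsvnlog' child process and return a logstream
+    iterating over the log entries it pickles back."""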
+    args = [url, paths, start, end, limit, discover_changed_paths,
+            strict_node_history]
+    arg = encodeargs(args)
+    hgexe = util.hgexecutable()
+    cmd = '%s debugsvnlog' % util.shellquote(hgexe)
+    stdin, stdout = os.popen2(cmd, 'b')
+    stdin.write(arg)
+    stdin.close()
+    return logstream(stdout)
+
 # SVN conversion code stolen from bzr-svn and tailor
-class convert_svn(converter_source):
+#
+# Subversion looks like a versioned filesystem; branch structures are
+# defined by convention and not enforced by the tool. First, we define
+# the potential branches (modules) as "trunk" and the child directories
+# of "branches". Revisions are then identified by their module and
+# revision number (and a repository identifier).
+#
+# The revision graph is really a tree (or a forest). By default, a
+# revision's parent is the previous revision in the same module. If the
+# module directory is copied or moved from another module, then the
+# revision is the module root and its parent is the source revision in
+# the parent module. A revision has at most one parent.
+#
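+# For example, with the default layout settings a repository containing
+# "trunk", "branches/featureX" and "tags/1.0" (illustrative names) is
+# converted with trunk as the main module, featureX as an extra head and
+# 1.0 picked up as a tag.
+#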
+class svn_source(converter_source):
     def __init__(self, ui, url, rev=None):
-        super(convert_svn, self).__init__(ui, url, rev=rev)
+        super(svn_source, self).__init__(ui, url, rev=rev)
 
         try:
             SubversionException
         except NameError:
-            msg = 'subversion python bindings could not be loaded\n'
-            ui.warn(msg)
-            raise NoRepo(msg)
+            raise NoRepo('Subversion python bindings could not be loaded')
 
         self.encoding = locale.getpreferredencoding()
         self.lastrevs = {}
 
         latest = None
-        if rev:
-            try:
-                latest = int(rev)
-            except ValueError:
-                raise util.Abort('svn: revision %s is not an integer' % rev)
         try:
             # Support file://path@rev syntax. Useful e.g. to convert
             # deleted branches.
@@ -74,33 +174,50 @@
                 url = url[:at]
         except ValueError, e:
             pass
-        self.url = url
+        self.url = geturl(url)
         self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
         try:
-            self.transport = transport.SvnRaTransport(url=url)
+            self.transport = transport.SvnRaTransport(url=self.url)
             self.ra = self.transport.ra
             self.ctx = self.transport.client
             self.base = svn.ra.get_repos_root(self.ra)
             self.module = self.url[len(self.base):]
-            self.modulemap = {} # revision, module
+            self.rootmodule = self.module
             self.commits = {}
-            self.files = {}
+            self.paths = {}
             self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding)
         except SubversionException, e:
-            raise NoRepo("couldn't open SVN repo %s" % url)
+            ui.print_exc()
+            raise NoRepo("%s does not look like a Subversion repo" % self.url)
+
+        if rev:
+            try:
+                latest = int(rev)
+            except ValueError:
+                raise util.Abort('svn: revision %s is not an integer' % rev)
 
         try:
             self.get_blacklist()
         except IOError, e:
             pass
 
-        self.last_changed = self.latest(self.module, latest)
+        self.head = self.latest(self.module, latest)
+        if not self.head:
+            raise util.Abort(_('no revision found in module %s') %
+                             self.module.encode(self.encoding))
+        self.last_changed = self.revnum(self.head)
+        
+        self._changescache = None
 
-        self.head = self.revid(self.last_changed)
+        if os.path.exists(os.path.join(url, '.svn/entries')):
+            self.wc = url
+        else:
+            self.wc = None
+        self.convertfp = None
 
     def setrevmap(self, revmap):
         lastrevs = {}
-        for revid in revmap.keys():
+        for revid in revmap.iterkeys():
             uuid, module, revnum = self.revsplit(revid)
             lastrevnum = lastrevs.setdefault(module, revnum)
             if revnum > lastrevnum:
@@ -109,46 +226,61 @@
 
     def exists(self, path, optrev):
         try:
-            return svn.client.ls(self.url.rstrip('/') + '/' + path,
+            svn.client.ls(self.url.rstrip('/') + '/' + path,
                                  optrev, False, self.ctx)
+            return True
         except SubversionException, err:
-            return []
+            return False
 
     def getheads(self):
-        # detect standard /branches, /tags, /trunk layout
-        optrev = svn.core.svn_opt_revision_t()
-        optrev.kind = svn.core.svn_opt_revision_number
-        optrev.value.number = self.last_changed
-        rpath = self.url.strip('/')
-        cfgtrunk = self.ui.config('convert', 'svn.trunk')
-        cfgbranches = self.ui.config('convert', 'svn.branches')
-        trunk = (cfgtrunk or 'trunk').strip('/')
-        branches = (cfgbranches or 'branches').strip('/')
-        if self.exists(trunk, optrev) and self.exists(branches, optrev):
-            self.ui.note('found trunk at %r and branches at %r\n' %
-                         (trunk, branches))
-            oldmodule = self.module
+
+        def getcfgpath(name, rev):
+            cfgpath = self.ui.config('convert', 'svn.' + name)
+            path = (cfgpath or name).strip('/')
+            if not self.exists(path, rev):
+                if cfgpath:
+                    raise util.Abort(_('expected %s to be at %r, but not found')
+                                 % (name, path))
+                return None
+            self.ui.note(_('found %s at %r\n') % (name, path))
+            return path
+
+        rev = optrev(self.last_changed)
+        oldmodule = ''
+        trunk = getcfgpath('trunk', rev)
+        tags = getcfgpath('tags', rev)
+        branches = getcfgpath('branches', rev)
+
+        # If the project has a trunk or branches, we will extract heads
+        # from them. We keep the project root otherwise.
+        if trunk:
+            oldmodule = self.module or ''
             self.module += '/' + trunk
-            lt = self.latest(self.module, self.last_changed)
-            self.head = self.revid(lt)
-            self.heads = [self.head]
-            branchnames = svn.client.ls(rpath + '/' + branches, optrev, False,
+            self.head = self.latest(self.module, self.last_changed)
+            if not self.head:
+                raise util.Abort(_('no revision found in module %s') %
+                                 self.module.encode(self.encoding))
+
+        # First head in the list is the module's head
+        self.heads = [self.head]
+        self.tags = '%s/%s' % (oldmodule, (tags or 'tags'))
+
+        # Check if branches bring a few more heads to the list
+        if branches:
+            rpath = self.url.strip('/')
+            branchnames = svn.client.ls(rpath + '/' + branches, rev, False,
                                         self.ctx)
             for branch in branchnames.keys():
-                if oldmodule:
-                    module = '/' + oldmodule + '/' + branches + '/' + branch
-                else:
-                    module = '/' + branches + '/' + branch
-                brevnum = self.latest(module, self.last_changed)
-                brev = self.revid(brevnum, module)
-                self.ui.note('found branch %s at %d\n' % (branch, brevnum))
-                self.heads.append(brev)
-        elif cfgtrunk or cfgbranches:
-            raise util.Abort(_('trunk/branch layout expected, '
-                               'but not found'))
-        else:
-            self.ui.note('working with one branch\n')
-            self.heads = [self.head]
+                module = '%s/%s/%s' % (oldmodule, branches, branch)
+                brevid = self.latest(module, self.last_changed)
+                if not brevid:
+                    self.ui.note(_('ignoring empty branch %s\n') %
+                                   branch.encode(self.encoding))
+                    continue
+                self.ui.note('found branch %s at %d\n' % 
+                             (branch, self.revnum(brevid)))
+                self.heads.append(brevid)
+
         return self.heads
 
     def getfile(self, file, rev):
@@ -160,106 +292,88 @@
         return self.modecache[(file, rev)]
 
     def getchanges(self, rev):
+        if self._changescache and self._changescache[0] == rev:
+            return self._changescache[1]
+        self._changescache = None
         self.modecache = {}
-        files = self.files[rev]
-        cl = files
-        cl.sort()
+        (paths, parents) = self.paths[rev]
+        if parents:
+            files, copies = self.expandpaths(rev, paths, parents)
+        else:
+            # Perform a full checkout on roots
+            uuid, module, revnum = self.revsplit(rev)
+            entries = svn.client.ls(self.base + module, optrev(revnum), 
+                                    True, self.ctx)
+            files = [n for n,e in entries.iteritems() 
+                     if e.kind == svn.core.svn_node_file]
+            copies = {}
+
+        files.sort()
+        files = zip(files, [rev] * len(files))
+
         # caller caches the result, so free it here to release memory
-        del self.files[rev]
-        return cl
+        del self.paths[rev]
+        return (files, copies)
+
+    def getchangedfiles(self, rev, i):
+        changes = self.getchanges(rev)
+        self._changescache = (rev, changes)
+        return [f[0] for f in changes[0]]
 
     def getcommit(self, rev):
         if rev not in self.commits:
             uuid, module, revnum = self.revsplit(rev)
             self.module = module
             self.reparent(module)
+            # We assume that:
+            # - requests for revisions after "stop" come from the
+            #   revision graph backward traversal. Cache all of them
+            #   down to stop; they will be used eventually.
+            # - requests for revisions before "stop" are made to get the
+            #   parents of isolated branches. Just fetch what is needed.
             stop = self.lastrevs.get(module, 0)
-            self._fetch_revisions(from_revnum=revnum, to_revnum=stop)
+            if revnum < stop:
+                stop = revnum + 1
+            self._fetch_revisions(revnum, stop)
         commit = self.commits[rev]
         # caller caches the result, so free it here to release memory
         del self.commits[rev]
         return commit
 
-    def get_log(self, paths, start, end, limit=0, discover_changed_paths=True,
-                strict_node_history=False):
-        '''wrapper for svn.ra.get_log.
-        on a large repository, svn.ra.get_log pins huge amounts of
-        memory that cannot be recovered.  work around it by forking
-        and writing results over a pipe.'''
-
-        def child(fp):
-            protocol = -1
-            def receiver(orig_paths, revnum, author, date, message, pool):
-                if orig_paths is not None:
-                    for k, v in orig_paths.iteritems():
-                        orig_paths[k] = changedpath(v)
-                pickle.dump((orig_paths, revnum, author, date, message),
-                            fp, protocol)
-
-            try:
-                # Use an ra of our own so that our parent can consume
-                # our results without confusing the server.
-                t = transport.SvnRaTransport(url=self.url)
-                svn.ra.get_log(t.ra, paths, start, end, limit,
-                               discover_changed_paths,
-                               strict_node_history,
-                               receiver)
-            except SubversionException, (_, num):
-                self.ui.print_exc()
-                pickle.dump(num, fp, protocol)
-            else:
-                pickle.dump(None, fp, protocol)
-            fp.close()
-
-        def parent(fp):
-            while True:
-                entry = pickle.load(fp)
-                try:
-                    orig_paths, revnum, author, date, message = entry
-                except:
-                    if entry is None:
-                        break
-                    raise SubversionException("child raised exception", entry)
-                yield entry
-
-        rfd, wfd = os.pipe()
-        pid = os.fork()
-        if pid:
-            os.close(wfd)
-            for p in parent(os.fdopen(rfd, 'rb')):
-                yield p
-            ret = os.waitpid(pid, 0)[1]
-            if ret:
-                raise util.Abort(_('get_log %s') % util.explain_exit(ret))
-        else:
-            os.close(rfd)
-            child(os.fdopen(wfd, 'wb'))
-            os._exit(0)
-
     def gettags(self):
         tags = {}
         start = self.revnum(self.head)
         try:
-            for entry in self.get_log(['/tags'], 0, start):
+            for entry in get_log(self.url, [self.tags], 0, start):
                 orig_paths, revnum, author, date, message = entry
                 for path in orig_paths:
-                    if not path.startswith('/tags/'):
+                    if not path.startswith(self.tags+'/'):
                         continue
                     ent = orig_paths[path]
                     source = ent.copyfrom_path
                     rev = ent.copyfrom_rev
-                    tag = path.split('/', 2)[2]
+                    tag = path.split('/')[-1]
                     tags[tag] = self.revid(rev, module=source)
-        except SubversionException, (_, num):
+        except SubversionException, (inst, num):
             self.ui.note('no tags found at revision %d\n' % start)
         return tags
 
+    def converted(self, rev, destrev):
+        if not self.wc:
+            return
+        if self.convertfp is None:
+            self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
+                                  'a')
+        self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
+        self.convertfp.flush()
+
     # -- helper functions --
 
     def revid(self, revnum, module=None):
         if not module:
             module = self.module
-        return (u"svn:%s%s@%s" % (self.uuid, module, revnum)).decode(self.encoding)
+        return u"svn:%s%s@%s" % (self.uuid, module.decode(self.encoding),
+                                 revnum)
 
     def revnum(self, rev):
         return int(rev.split('@')[-1])
@@ -275,7 +389,11 @@
         return uuid, mod, revnum
 
     def latest(self, path, stop=0):
-        'find the latest revision affecting path, up to stop'
+        """Find the latest revid affecting path, up to stop. It may return
+        a revision in a different module, since a branch may be moved without
+        a change being reported. Return None if computed module does not
+        belong to rootmodule subtree.
+        """
         if not stop:
             stop = svn.ra.get_latest_revnum(self.ra)
         try:
@@ -285,10 +403,33 @@
         except SubversionException:
             dirent = None
         if not dirent:
-            print self.base, path
             raise util.Abort('%s not found up to revision %d' % (path, stop))
 
-        return dirent.created_rev
+        # stat() gives us the previous revision on this line of development, but
+        # it might be in *another module*. Fetch the log and detect renames down
+        # to the latest revision.
+        stream = get_log(self.url, [path], stop, dirent.created_rev)
+        try:
+            for entry in stream:
+                paths, revnum, author, date, message = entry
+                if revnum <= dirent.created_rev:
+                    break
+
+                for p in paths:
+                    if not path.startswith(p) or not paths[p].copyfrom_path:
+                        continue
+                    newpath = paths[p].copyfrom_path + path[len(p):]
+                    self.ui.debug("branch renamed from %s to %s at %d\n" % 
+                                  (path, newpath, revnum))
+                    path = newpath
+                    break
+        finally:
+            stream.close()
+
+        if not path.startswith(self.rootmodule):
+            self.ui.debug(_('ignoring foreign branch %r\n') % path)
+            return None
+        return self.revid(dirent.created_rev, path)
 
     def get_blacklist(self):
         """Avoid certain revision numbers.
@@ -296,7 +437,7 @@
         out, e.g. 'I copied trunk into a subdirectory of itself instead
         of making a branch'. The converted repository is significantly
         smaller if we ignore such revisions."""
-        self.blacklist = set()
+        self.blacklist = util.set()
         blacklist = self.blacklist
         for line in file("blacklist.txt", "r"):
             if not line.startswith("#"):
@@ -314,7 +455,7 @@
         self.ui.debug("reparent to %s\n" % svn_url.encode(self.encoding))
         svn.ra.reparent(self.ra, svn_url.encode(self.encoding))
 
-    def _fetch_revisions(self, from_revnum = 0, to_revnum = 347):
+    def expandpaths(self, rev, paths, parents):
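+        # Expand the changed paths of a revision into the list of files
+        # touched under the current module, plus a map of copies.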
         def get_entry_from_path(path, module=self.module):
             # Given the repository url of this wc, say
             #   "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
@@ -322,7 +463,6 @@
             # svn log --xml says, ie
             #   "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
             # that is to say "tests/PloneTestCase.py"
-
             if path.startswith(module):
                 relative = path[len(module):]
                 if relative.startswith('/'):
@@ -331,31 +471,238 @@
                     return relative
 
             # The path is outside our tracked tree...
-            self.ui.debug('Ignoring %r since it is not under %r\n' % (path, module))
+            self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
             return None
 
+        entries = []
+        copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions.
+        copies = {}
+
+        new_module, revnum = self.revsplit(rev)[1:]
+        if new_module != self.module:
+            self.module = new_module
+            self.reparent(self.module)
+
+        for path, ent in paths:
+            entrypath = get_entry_from_path(path, module=self.module)
+            entry = entrypath.decode(self.encoding)
+
+            kind = svn.ra.check_path(self.ra, entrypath, revnum)
+            if kind == svn.core.svn_node_file:
+                if ent.copyfrom_path:
+                    copyfrom_path = get_entry_from_path(ent.copyfrom_path)
+                    if copyfrom_path:
+                        self.ui.debug("Copied to %s from %s@%s\n" %
+                                      (entrypath, copyfrom_path,
+                                       ent.copyfrom_rev))
+                        # It's probably important for hg that the source
+                        # exists in the revision's parent, not just the
+                        # ent.copyfrom_rev
+                        fromkind = svn.ra.check_path(self.ra, copyfrom_path, ent.copyfrom_rev)
+                        if fromkind != 0:
+                            copies[self.recode(entry)] = self.recode(copyfrom_path)
+                entries.append(self.recode(entry))
+            elif kind == 0: # gone, but had better be a deleted *file*
+                self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
+
+                # if a branch is created but entries are removed in the same
+                # changeset, get the right fromrev
+                # parents cannot be empty here, you cannot remove things from
+                # a root revision.
+                uuid, old_module, fromrev = self.revsplit(parents[0])
+
+                basepath = old_module + "/" + get_entry_from_path(path, module=self.module)
+                entrypath = old_module + "/" + get_entry_from_path(path, module=self.module)
+
+                def lookup_parts(p):
+                    rc = None
+                    parts = p.split("/")
+                    for i in range(len(parts)):
+                        part = "/".join(parts[:i])
+                        info = part, copyfrom.get(part, None)
+                        if info[1] is not None:
+                            self.ui.debug("Found parent directory %s\n" % info[1])
+                            rc = info
+                    return rc
+
+                self.ui.debug("base, entry %s %s\n" % (basepath, entrypath))
+
+                frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
+
+                # need to remove fragment from lookup_parts and replace with copyfrom_path
+                if frompath is not None:
+                    self.ui.debug("munge-o-matic\n")
+                    self.ui.debug(entrypath + '\n')
+                    self.ui.debug(entrypath[len(frompath):] + '\n')
+                    entrypath = froment.copyfrom_path + entrypath[len(frompath):]
+                    fromrev = froment.copyfrom_rev
+                    self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath))
+
+                # We can avoid the reparent calls if the module has not changed
+                # but it is probably not worth the pain.
+                self.reparent('')
+                fromkind = svn.ra.check_path(self.ra, entrypath.strip('/'), fromrev)
+                self.reparent(self.module)
+                
+                if fromkind == svn.core.svn_node_file:   # a deleted file
+                    entries.append(self.recode(entry))
+                elif fromkind == svn.core.svn_node_dir:
+                    # print "Deleted/moved non-file:", revnum, path, ent
+                    # children = self._find_children(path, revnum - 1)
+                    # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action)
+                    # Sometimes this is tricky. For example, in the
+                    # Subversion repository itself, revision 6940 copied
+                    # a dir and one of its files was deleted from the new
+                    # location in the same commit. This code can't deal
+                    # with that yet.
+                    if ent.action == 'C':
+                        children = self._find_children(path, fromrev)
+                    else:
+                        oroot = entrypath.strip('/')
+                        nroot = path.strip('/')
+                        children = self._find_children(oroot, fromrev)
+                        children = [s.replace(oroot,nroot) for s in children]
+                    # Mark all [files, not directories] as deleted.
+                    for child in children:
+                        # Can we move a child directory and its
+                        # parent in the same commit? (probably can). Could
+                        # cause problems if instead of revnum -1,
+                        # we have to look in (copyfrom_path, revnum - 1)
+                        entrypath = get_entry_from_path("/" + child, module=old_module)
+                        if entrypath:
+                            entry = self.recode(entrypath.decode(self.encoding))
+                            if entry in copies:
+                                # deleted file within a copy
+                                del copies[entry]
+                            else:
+                                entries.append(entry)
+                else:
+                    self.ui.debug('unknown path in revision %d: %s\n' % \
+                                  (revnum, path))
+            elif kind == svn.core.svn_node_dir:
+                # Should probably synthesize normal file entries
+                # and handle as above to clean up copy/rename handling.
+
+                # If the directory just had a prop change,
+                # then we shouldn't need to look for its children.
+                if ent.action == 'M':
+                    continue
+
+                # Also this could create duplicate entries. Not sure
+                # whether this will matter. Maybe should make entries a set.
+                # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev
+                # This will fail if a directory was copied
+                # from another branch and then some of its files
+                # were deleted in the same transaction.
+                children = self._find_children(path, revnum)
+                children.sort()
+                for child in children:
+                    # Can we move a child directory and its
+                    # parent in the same commit? (probably can). Could
+                    # cause problems if instead of revnum -1,
+                    # we have to look in (copyfrom_path, revnum - 1)
+                    entrypath = get_entry_from_path("/" + child, module=self.module)
+                    # print child, self.module, entrypath
+                    if entrypath:
+                        # Need to filter out directories here...
+                        kind = svn.ra.check_path(self.ra, entrypath, revnum)
+                        if kind != svn.core.svn_node_dir:
+                            entries.append(self.recode(entrypath))
+
+                # Copies here (must copy all from source)
+                # Probably not a real problem for us if
+                # source does not exist
+
+                # Can do this with the copy command "hg copy"
+                # if ent.copyfrom_path:
+                #     copyfrom_entry = get_entry_from_path(ent.copyfrom_path.decode(self.encoding),
+                #             module=self.module)
+                #     copyto_entry = entrypath
+                #
+                #     print "copy directory", copyfrom_entry, 'to', copyto_entry
+                #
+                #     copies.append((copyfrom_entry, copyto_entry))
+
+                if ent.copyfrom_path:
+                    copyfrom_path = ent.copyfrom_path.decode(self.encoding)
+                    copyfrom_entry = get_entry_from_path(copyfrom_path, module=self.module)
+                    if copyfrom_entry:
+                        copyfrom[path] = ent
+                        self.ui.debug("mark %s came from %s\n" % (path, copyfrom[path]))
+
+                        # Good, /probably/ a regular copy. Really should check
+                        # to see whether the parent revision actually contains
+                        # the directory in question.
+                        children = self._find_children(self.recode(copyfrom_path), ent.copyfrom_rev)
+                        children.sort()
+                        for child in children:
+                            entrypath = get_entry_from_path("/" + child, module=self.module)
+                            if entrypath:
+                                entry = entrypath.decode(self.encoding)
+                                # print "COPY COPY From", copyfrom_entry, entry
+                                copyto_path = path + entry[len(copyfrom_entry):]
+                                copyto_entry =  get_entry_from_path(copyto_path, module=self.module)
+                                # print "COPY", entry, "COPY To", copyto_entry
+                                copies[self.recode(copyto_entry)] = self.recode(entry)
+                                # copy from quux splort/quuxfile
+
+        return (util.unique(entries), copies)
+
+    def _fetch_revisions(self, from_revnum, to_revnum):
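+        # revisions are walked from the newer bound down to the older one;
+        # normalize the arguments so from_revnum >= to_revnum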
+        if from_revnum < to_revnum:
+            from_revnum, to_revnum = to_revnum, from_revnum
+
         self.child_cset = None
         def parselogentry(orig_paths, revnum, author, date, message):
+            """Return the parsed commit object or None, and True if 
+            the revision is a branch root.
+            """
             self.ui.debug("parsing revision %d (%d changes)\n" %
                           (revnum, len(orig_paths)))
 
-            if revnum in self.modulemap:
-                new_module = self.modulemap[revnum]
-                if new_module != self.module:
-                    self.module = new_module
-                    self.reparent(self.module)
+            branched = False
+            rev = self.revid(revnum)
+            # branch log might return entries for a parent we already have
+
+            if (rev in self.commits or revnum < to_revnum):
+                return None, branched
 
-            copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions.
-            copies = {}
-            entries = []
-            rev = self.revid(revnum)
             parents = []
+            # check whether this revision is the start of a branch or part
+            # of a branch renaming
+            orig_paths = orig_paths.items()
+            orig_paths.sort()
+            root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)]
+            if root_paths:
+                path, ent = root_paths[-1]
+                if ent.copyfrom_path:
+                    branched = True
+                    newpath = ent.copyfrom_path + self.module[len(path):]
+                    # ent.copyfrom_rev may not be the actual last revision
+                    previd = self.latest(newpath, ent.copyfrom_rev)
+                    if previd is not None:
+                        parents = [previd]
+                        prevmodule, prevnum = self.revsplit(previd)[1:]
+                        self.ui.note('found parent of branch %s at %d: %s\n' %
+                                     (self.module, prevnum, prevmodule))
+                else:
+                    self.ui.debug("No copyfrom path, don't know what to do.\n")
 
-            # branch log might return entries for a parent we already have
-            if (rev in self.commits or
-                (revnum < self.lastrevs.get(self.module, 0))):
-                return
+            paths = []
+            # filter out unrelated paths
+            for path, ent in orig_paths:
+                if not path.startswith(self.module):
+                    self.ui.debug("boring@%s: %s\n" % (revnum, path))
+                    continue
+                paths.append((path, ent))
 
+            # Example SVN datetime. Includes microseconds.
+            # ISO-8601 conformant
+            # '2007-01-04T17:35:00.902377Z'
+            date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
+
+            log = message and self.recode(message) or ''
+            author = author and self.recode(author) or ''
             try:
                 branch = self.module.split("/")[-1]
                 if branch == 'trunk':
@@ -363,226 +710,61 @@
             except IndexError:
                 branch = None
 
-            orig_paths = orig_paths.items()
-            orig_paths.sort()
-            for path, ent in orig_paths:
-                # self.ui.write("path %s\n" % path)
-                if path == self.module: # Follow branching back in history
-                    if ent:
-                        if ent.copyfrom_path:
-                            # ent.copyfrom_rev may not be the actual last revision
-                            prev = self.latest(ent.copyfrom_path, ent.copyfrom_rev)
-                            self.modulemap[prev] = ent.copyfrom_path
-                            parents = [self.revid(prev, ent.copyfrom_path)]
-                            self.ui.note('found parent of branch %s at %d: %s\n' % \
-                                         (self.module, prev, ent.copyfrom_path))
-                        else:
-                            self.ui.debug("No copyfrom path, don't know what to do.\n")
-                            # Maybe it was added and there is no more history.
-                entrypath = get_entry_from_path(path, module=self.module)
-                # self.ui.write("entrypath %s\n" % entrypath)
-                if entrypath is None:
-                    # Outside our area of interest
-                    self.ui.debug("boring@%s: %s\n" % (revnum, path))
-                    continue
-                entry = entrypath.decode(self.encoding)
-
-                kind = svn.ra.check_path(self.ra, entrypath, revnum)
-                if kind == svn.core.svn_node_file:
-                    if ent.copyfrom_path:
-                        copyfrom_path = get_entry_from_path(ent.copyfrom_path)
-                        if copyfrom_path:
-                            self.ui.debug("Copied to %s from %s@%s\n" % (entry, copyfrom_path, ent.copyfrom_rev))
-                            # It's probably important for hg that the source
-                            # exists in the revision's parent, not just the
-                            # ent.copyfrom_rev
-                            fromkind = svn.ra.check_path(self.ra, copyfrom_path, ent.copyfrom_rev)
-                            if fromkind != 0:
-                                copies[self.recode(entry)] = self.recode(copyfrom_path)
-                    entries.append(self.recode(entry))
-                elif kind == 0: # gone, but had better be a deleted *file*
-                    self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
-
-                    # if a branch is created but entries are removed in the same
-                    # changeset, get the right fromrev
-                    if parents:
-                        uuid, old_module, fromrev = self.revsplit(parents[0])
-                    else:
-                        fromrev = revnum - 1
-                        # might always need to be revnum - 1 in these 3 lines?
-                        old_module = self.modulemap.get(fromrev, self.module)
-
-                    basepath = old_module + "/" + get_entry_from_path(path, module=self.module)
-                    entrypath = old_module + "/" + get_entry_from_path(path, module=self.module)
-
-                    def lookup_parts(p):
-                        rc = None
-                        parts = p.split("/")
-                        for i in range(len(parts)):
-                            part = "/".join(parts[:i])
-                            info = part, copyfrom.get(part, None)
-                            if info[1] is not None:
-                                self.ui.debug("Found parent directory %s\n" % info[1])
-                                rc = info
-                        return rc
-
-                    self.ui.debug("base, entry %s %s\n" % (basepath, entrypath))
-
-                    frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
-
-                    # need to remove fragment from lookup_parts and replace with copyfrom_path
-                    if frompath is not None:
-                        self.ui.debug("munge-o-matic\n")
-                        self.ui.debug(entrypath + '\n')
-                        self.ui.debug(entrypath[len(frompath):] + '\n')
-                        entrypath = froment.copyfrom_path + entrypath[len(frompath):]
-                        fromrev = froment.copyfrom_rev
-                        self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath))
-
-                    fromkind = svn.ra.check_path(self.ra, entrypath, fromrev)
-                    if fromkind == svn.core.svn_node_file:   # a deleted file
-                        entries.append(self.recode(entry))
-                    elif fromkind == svn.core.svn_node_dir:
-                        # print "Deleted/moved non-file:", revnum, path, ent
-                        # children = self._find_children(path, revnum - 1)
-                        # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action)
-                        # Sometimes this is tricky. For example: in
-                        # The Subversion Repository revision 6940 a dir
-                        # was copied and one of its files was deleted
-                        # from the new location in the same commit. This
-                        # code can't deal with that yet.
-                        if ent.action == 'C':
-                            children = self._find_children(path, fromrev)
-                        else:
-                            oroot = entrypath.strip('/')
-                            nroot = path.strip('/')
-                            children = self._find_children(oroot, fromrev)
-                            children = [s.replace(oroot,nroot) for s in children]
-                        # Mark all [files, not directories] as deleted.
-                        for child in children:
-                            # Can we move a child directory and its
-                            # parent in the same commit? (probably can). Could
-                            # cause problems if instead of revnum -1,
-                            # we have to look in (copyfrom_path, revnum - 1)
-                            entrypath = get_entry_from_path("/" + child, module=old_module)
-                            if entrypath:
-                                entry = self.recode(entrypath.decode(self.encoding))
-                                if entry in copies:
-                                    # deleted file within a copy
-                                    del copies[entry]
-                                else:
-                                    entries.append(entry)
-                    else:
-                        self.ui.debug('unknown path in revision %d: %s\n' % \
-                                      (revnum, path))
-                elif kind == svn.core.svn_node_dir:
-                    # Should probably synthesize normal file entries
-                    # and handle as above to clean up copy/rename handling.
-
-                    # If the directory just had a prop change,
-                    # then we shouldn't need to look for its children.
-                    # Also this could create duplicate entries. Not sure
-                    # whether this will matter. Maybe should make entries a set.
-                    # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev
-                    # This will fail if a directory was copied
-                    # from another branch and then some of its files
-                    # were deleted in the same transaction.
-                    children = self._find_children(path, revnum)
-                    children.sort()
-                    for child in children:
-                        # Can we move a child directory and its
-                        # parent in the same commit? (probably can). Could
-                        # cause problems if instead of revnum -1,
-                        # we have to look in (copyfrom_path, revnum - 1)
-                        entrypath = get_entry_from_path("/" + child, module=self.module)
-                        # print child, self.module, entrypath
-                        if entrypath:
-                            # Need to filter out directories here...
-                            kind = svn.ra.check_path(self.ra, entrypath, revnum)
-                            if kind != svn.core.svn_node_dir:
-                                entries.append(self.recode(entrypath))
-
-                    # Copies here (must copy all from source)
-                    # Probably not a real problem for us if
-                    # source does not exist
-
-                    # Can do this with the copy command "hg copy"
-                    # if ent.copyfrom_path:
-                    #     copyfrom_entry = get_entry_from_path(ent.copyfrom_path.decode(self.encoding),
-                    #             module=self.module)
-                    #     copyto_entry = entrypath
-                    #
-                    #     print "copy directory", copyfrom_entry, 'to', copyto_entry
-                    #
-                    #     copies.append((copyfrom_entry, copyto_entry))
-
-                    if ent.copyfrom_path:
-                        copyfrom_path = ent.copyfrom_path.decode(self.encoding)
-                        copyfrom_entry = get_entry_from_path(copyfrom_path, module=self.module)
-                        if copyfrom_entry:
-                            copyfrom[path] = ent
-                            self.ui.debug("mark %s came from %s\n" % (path, copyfrom[path]))
-
-                            # Good, /probably/ a regular copy. Really should check
-                            # to see whether the parent revision actually contains
-                            # the directory in question.
-                            children = self._find_children(self.recode(copyfrom_path), ent.copyfrom_rev)
-                            children.sort()
-                            for child in children:
-                                entrypath = get_entry_from_path("/" + child, module=self.module)
-                                if entrypath:
-                                    entry = entrypath.decode(self.encoding)
-                                    # print "COPY COPY From", copyfrom_entry, entry
-                                    copyto_path = path + entry[len(copyfrom_entry):]
-                                    copyto_entry =  get_entry_from_path(copyto_path, module=self.module)
-                                    # print "COPY", entry, "COPY To", copyto_entry
-                                    copies[self.recode(copyto_entry)] = self.recode(entry)
-                                    # copy from quux splort/quuxfile
-
-            self.modulemap[revnum] = self.module # track backwards in time
-            # a list of (filename, id) where id lets us retrieve the file.
-            # eg in git, id is the object hash. for svn it'll be the
-            self.files[rev] = zip(entries, [rev] * len(entries))
-            if not entries:
-                return
-
-            # Example SVN datetime. Includes microseconds.
-            # ISO-8601 conformant
-            # '2007-01-04T17:35:00.902377Z'
-            date = util.parsedate(date[:18] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
-
-            log = message and self.recode(message)
-            author = author and self.recode(author) or ''
-
             cset = commit(author=author,
                           date=util.datestr(date),
                           desc=log,
                           parents=parents,
-                          copies=copies,
                           branch=branch,
                           rev=rev.encode('utf-8'))
 
             self.commits[rev] = cset
+            # The parents list is *shared* between self.paths and the
+            # commit object; updating it in place later updates both.
+            self.paths[rev] = (paths, cset.parents)
             if self.child_cset and not self.child_cset.parents:
-                self.child_cset.parents = [rev]
+                self.child_cset.parents[:] = [rev]
             self.child_cset = cset
+            return cset, branched
 
         self.ui.note('fetching revision log for "%s" from %d to %d\n' %
                      (self.module, from_revnum, to_revnum))
 
         try:
-            discover_changed_paths = True
-            strict_node_history = False
-            for entry in self.get_log([self.module], from_revnum, to_revnum):
-                orig_paths, revnum, author, date, message = entry
-                if self.is_blacklisted(revnum):
-                    self.ui.note('skipping blacklisted revision %d\n' % revnum)
-                    continue
-                if orig_paths is None:
-                    self.ui.debug('revision %d has no entries\n' % revnum)
-                    continue
-                parselogentry(orig_paths, revnum, author, date, message)
-        except SubversionException, (_, num):
+            firstcset = None
+            branched = False
+            stream = get_log(self.url, [self.module], from_revnum, to_revnum)
+            try:
+                for entry in stream:
+                    paths, revnum, author, date, message = entry
+                    if self.is_blacklisted(revnum):
+                        self.ui.note('skipping blacklisted revision %d\n' 
+                                     % revnum)
+                        continue
+                    if paths is None:
+                        self.ui.debug('revision %d has no entries\n' % revnum)
+                        continue
+                    cset, branched = parselogentry(paths, revnum, author, 
+                                                   date, message)
+                    if cset:
+                        firstcset = cset
+                    if branched:
+                        break
+            finally:
+                stream.close()
+
+            if not branched and firstcset and not firstcset.parents:
+                # The first revision of the sequence (the last fetched one)
+                # has invalid parents if not a branch root. Find the parent
+                # revision now, if any.
+                try:
+                    firstrevnum = self.revnum(firstcset.rev)
+                    if firstrevnum > 1:
+                        latest = self.latest(self.module, firstrevnum - 1)
+                        if latest:
+                            firstcset.parents.append(latest)
+                except util.Abort:
+                    pass
+        except SubversionException, (inst, num):
             if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
                 raise NoSuchRevision(branch=self,
                     revision="Revision number %d" % to_revnum)
@@ -593,9 +775,9 @@
         # TODO: ra.get_file transmits the whole file instead of diffs.
         mode = ''
         try:
-            revnum = self.revnum(rev)
-            if self.module != self.modulemap[revnum]:
-                self.module = self.modulemap[revnum]
+            new_module, revnum = self.revsplit(rev)[1:]
+            if self.module != new_module:
+                self.module = new_module
                 self.reparent(self.module)
             info = svn.ra.get_file(self.ra, file, revnum, io)
             if isinstance(info, list):
@@ -616,52 +798,244 @@
         return data, mode
 
     def _find_children(self, path, revnum):
-        path = path.strip("/")
+        path = path.strip('/')
+        pool = Pool()
+        rpath = '/'.join([self.base, path]).strip('/')
+        return ['%s/%s' % (path, x) for x in svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
+
+pre_revprop_change = '''#!/bin/sh
+
+REPOS="$1"
+REV="$2"
+USER="$3"
+PROPNAME="$4"
+ACTION="$5"
+
+if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
+if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
+if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
+
+echo "Changing prohibited revision property" >&2
+exit 1
+'''
+
+class svn_sink(converter_sink, commandline):
+    commit_re = re.compile(r'Committed revision (\d+).', re.M)
 
-        def _find_children_fallback(path, revnum):
-            # SWIG python bindings for getdir are broken up to at least 1.4.3
-            pool = Pool()
-            optrev = svn.core.svn_opt_revision_t()
-            optrev.kind = svn.core.svn_opt_revision_number
-            optrev.value.number = revnum
-            rpath = '/'.join([self.base, path]).strip('/')
-            return ['%s/%s' % (path, x) for x in svn.client.ls(rpath, optrev, True, self.ctx, pool).keys()]
+    def prerun(self):
+        if self.wc:
+            os.chdir(self.wc)
+
+    def postrun(self):
+        if self.wc:
+            os.chdir(self.cwd)
+
+    def join(self, name):
+        return os.path.join(self.wc, '.svn', name)
+
+    def revmapfile(self):
+        return self.join('hg-shamap')
+
+    def authorfile(self):
+        return self.join('hg-authormap')
+
+    def __init__(self, ui, path):
+        converter_sink.__init__(self, ui, path)
+        commandline.__init__(self, ui, 'svn')
+        self.delete = []
+        self.setexec = []
+        self.delexec = []
+        self.copies = []
+        self.wc = None
+        self.cwd = os.getcwd()
+
+        path = os.path.realpath(path)
 
-        if hasattr(self, '_find_children_fallback'):
-            return _find_children_fallback(path, revnum)
+        created = False
+        if os.path.isfile(os.path.join(path, '.svn', 'entries')):
+            self.wc = path
+            self.run0('update')
+        else:
+            wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
 
-        self.reparent("/" + path)
-        pool = Pool()
+            if os.path.isdir(os.path.dirname(path)):
+                if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
+                    ui.status(_('initializing svn repo %r\n') %
+                              os.path.basename(path))
+                    commandline(ui, 'svnadmin').run0('create', path)
+                    created = path
+                path = util.normpath(path)
+                if not path.startswith('/'):
+                    path = '/' + path
+                path = 'file://' + path
+
+            ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
+            self.run0('checkout', path, wcpath)
+
+            self.wc = wcpath
+        self.opener = util.opener(self.wc)
+        self.wopener = util.opener(self.wc)
+        self.childmap = mapfile(ui, self.join('hg-childmap'))
+        self.is_exec = util.checkexec(self.wc) and util.is_exec or None
 
-        children = []
-        def find_children_inner(children, path, revnum = revnum):
-            if hasattr(svn.ra, 'get_dir2'): # Since SVN 1.4
-                fields = 0xffffffff # Binding does not provide SVN_DIRENT_ALL
-                getdir = svn.ra.get_dir2(self.ra, path, revnum, fields, pool)
+        if created:
+            hook = os.path.join(created, 'hooks', 'pre-revprop-change')
+            fp = open(hook, 'w')
+            fp.write(pre_revprop_change)
+            fp.close()
+            util.set_flags(hook, "x")
+
+        xport = transport.SvnRaTransport(url=geturl(path))
+        self.uuid = svn.ra.get_uuid(xport.ra)
+
+    def wjoin(self, *names):
+        return os.path.join(self.wc, *names)
+
+    def putfile(self, filename, flags, data):
+        if 'l' in flags:
+            self.wopener.symlink(data, filename)
+        else:
+            try:
+                if os.path.islink(self.wjoin(filename)):
+                    os.unlink(filename)
+            except OSError:
+                pass
+            self.wopener(filename, 'w').write(data)
+
+            if self.is_exec:
+                was_exec = self.is_exec(self.wjoin(filename))
+            else:
+                # On filesystems without an execute bit, the only way to
+                # know whether it is set is to ask Subversion.  Setting it
+                # systematically is just as expensive and much simpler.
+                was_exec = 'x' not in flags
+
+            util.set_flags(self.wjoin(filename), flags)
+            if was_exec:
+                if 'x' not in flags:
+                    self.delexec.append(filename)
             else:
-                getdir = svn.ra.get_dir(self.ra, path, revnum, pool)
-            if type(getdir) == dict:
-                # python binding for getdir is broken up to at least 1.4.3
-                raise CompatibilityException()
-            dirents = getdir[0]
-            if type(dirents) == int:
-                # got here once due to infinite recursion bug
-                return
-            c = dirents.keys()
-            c.sort()
-            for child in c:
-                dirent = dirents[child]
-                if dirent.kind == svn.core.svn_node_dir:
-                    find_children_inner(children, (path + "/" + child).strip("/"))
-                else:
-                    children.append((path + "/" + child).strip("/"))
+                if 'x' in flags:
+                    self.setexec.append(filename)
+
+    def delfile(self, name):
+        self.delete.append(name)
+
+    def copyfile(self, source, dest):
+        self.copies.append([source, dest])
+
+    def _copyfile(self, source, dest):
+        # SVN's copy command pukes if the destination file exists, but
+        # our copyfile method expects to record a copy that has
+        # already occurred.  Cross the semantic gap.
+        wdest = self.wjoin(dest)
+        exists = os.path.exists(wdest)
+        if exists:
+            fd, tempname = tempfile.mkstemp(
+                prefix='hg-copy-', dir=os.path.dirname(wdest))
+            os.close(fd)
+            os.unlink(tempname)
+            os.rename(wdest, tempname)
+        try:
+            self.run0('copy', source, dest)
+        finally:
+            if exists:
+                try:
+                    os.unlink(wdest)
+                except OSError:
+                    pass
+                os.rename(tempname, wdest)
+
+    def dirs_of(self, files):
+        dirs = set()
+        for f in files:
+            if os.path.isdir(self.wjoin(f)):
+                dirs.add(f)
+            for i in strutil.rfindall(f, '/'):
+                dirs.add(f[:i])
+        return dirs
+
+    def add_dirs(self, files):
+        add_dirs = [d for d in self.dirs_of(files)
+                    if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
+        if add_dirs:
+            add_dirs.sort()
+            self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
+        return add_dirs
+
+    def add_files(self, files):
+        if files:
+            self.xargs(files, 'add', quiet=True)
+        return files
 
+    def tidy_dirs(self, names):
+        dirs = list(self.dirs_of(names))
+        dirs.sort(reverse=True)
+        deleted = []
+        for d in dirs:
+            wd = self.wjoin(d)
+            if os.listdir(wd) == ['.svn']:
+                self.run0('delete', d)
+                deleted.append(d)
+        return deleted
+
+    def addchild(self, parent, child):
+        self.childmap[parent] = child
+
+    def revid(self, rev):
+        return u"svn:%s@%s" % (self.uuid, rev)
+
+    def putcommit(self, files, parents, commit):
+        for parent in parents:
+            try:
+                return self.revid(self.childmap[parent])
+            except KeyError:
+                pass
+        entries = set(self.delete)
+        files = util.frozenset(files)
+        entries.update(self.add_dirs(files.difference(entries)))
+        if self.copies:
+            for s, d in self.copies:
+                self._copyfile(s, d)
+            self.copies = []
+        if self.delete:
+            self.xargs(self.delete, 'delete')
+            self.delete = []
+        entries.update(self.add_files(files.difference(entries)))
+        entries.update(self.tidy_dirs(entries))
+        if self.delexec:
+            self.xargs(self.delexec, 'propdel', 'svn:executable')
+            self.delexec = []
+        if self.setexec:
+            self.xargs(self.setexec, 'propset', 'svn:executable', '*')
+            self.setexec = []
+
+        fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
+        fp = os.fdopen(fd, 'w')
+        fp.write(commit.desc)
+        fp.close()
         try:
-            find_children_inner(children, "")
-        except CompatibilityException:
-            self._find_children_fallback = True
-            self.reparent(self.module)
-            return _find_children_fallback(path, revnum)
+            output = self.run0('commit',
+                               username=util.shortuser(commit.author),
+                               file=messagefile,
+                               encoding='utf-8')
+            try:
+                rev = self.commit_re.search(output).group(1)
+            except AttributeError:
+                self.ui.warn(_('unexpected svn output:\n'))
+                self.ui.warn(output)
+                raise util.Abort(_('unable to cope with svn output'))
+            if commit.rev:
+                self.run('propset', 'hg:convert-rev', commit.rev,
+                         revprop=True, revision=rev)
+            if commit.branch and commit.branch != 'default':
+                self.run('propset', 'hg:convert-branch', commit.branch,
+                         revprop=True, revision=rev)
+            for parent in parents:
+                self.addchild(parent, rev)
+            return self.revid(rev)
+        finally:
+            os.unlink(messagefile)
 
-        self.reparent(self.module)
-        return [path + "/" + c for c in children]
+    def puttags(self, tags):
+        self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
--- a/hgext/convert/transport.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/convert/transport.py	Wed Feb 06 19:57:52 2008 -0800
@@ -47,6 +47,10 @@
         svn.client.get_ssl_client_cert_pw_file_provider(pool),
         svn.client.get_ssl_server_trust_file_provider(pool),
         ]
+    # Platform-dependent authentication methods
+    if hasattr(svn.client, 'get_windows_simple_provider'):
+        providers.append(svn.client.get_windows_simple_provider(pool))
+
     return svn.core.svn_auth_open(providers, pool)
 
 class NotBranchError(SubversionException):
@@ -77,7 +81,7 @@
                 self.ra = svn.client.open_ra_session(
                     self.svn_url.encode('utf8'),
                     self.client, self.pool)
-            except SubversionException, (_, num):
+            except SubversionException, (inst, num):
                 if num in (svn.core.SVN_ERR_RA_ILLEGAL_URL,
                            svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED,
                            svn.core.SVN_ERR_BAD_URL):
--- a/hgext/extdiff.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/extdiff.py	Wed Feb 06 19:57:52 2008 -0800
@@ -4,106 +4,103 @@
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
-#
-# The `extdiff' Mercurial extension allows you to use external programs
-# to compare revisions, or revision with working dir.  The external diff
-# programs are called with a configurable set of options and two
-# non-option arguments: paths to directories containing snapshots of
-# files to compare.
-#
-# To enable this extension:
-#
-#   [extensions]
-#   hgext.extdiff =
-#
-# The `extdiff' extension also allows to configure new diff commands, so
-# you do not need to type "hg extdiff -p kdiff3" always.
-#
-#   [extdiff]
-#   # add new command that runs GNU diff(1) in 'context diff' mode
-#   cmd.cdiff = gdiff
-#   opts.cdiff = -Nprc5
+
+'''
+The `extdiff' Mercurial extension allows you to use external programs
+to compare revisions, or a revision with the working directory.  The external diff
+programs are called with a configurable set of options and two
+non-option arguments: paths to directories containing snapshots of
+files to compare.
+
+To enable this extension:
+
+  [extensions]
+  hgext.extdiff =
+
+The `extdiff' extension also allows you to configure new diff commands,
+so you do not always need to type "hg extdiff -p kdiff3".
 
-#   # add new command called vdiff, runs kdiff3
-#   cmd.vdiff = kdiff3
+  [extdiff]
+  # add new command that runs GNU diff(1) in 'context diff' mode
+  cdiff = gdiff -Nprc5
+  ## or the old way:
+  #cmd.cdiff = gdiff
+  #opts.cdiff = -Nprc5
 
-#   # add new command called meld, runs meld (no need to name twice)
-#   cmd.meld =
+  # add new command called vdiff, runs kdiff3
+  vdiff = kdiff3
 
-#   # add new command called vimdiff, runs gvimdiff with DirDiff plugin
-#   #(see http://www.vim.org/scripts/script.php?script_id=102)
-#   # Non english user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
-#   # your .vimrc
-#   cmd.vimdiff = gvim
-#   opts.vimdiff = -f '+next' '+execute "DirDiff" argv(0) argv(1)'
-#
-# Each custom diff commands can have two parts: a `cmd' and an `opts'
-# part.  The cmd.xxx option defines the name of an executable program
-# that will be run, and opts.xxx defines a set of command-line options
-# which will be inserted to the command between the program name and
-# the files/directories to diff (i.e. the cdiff example above).
-#
-# You can use -I/-X and list of file or directory names like normal
-# "hg diff" command.  The `extdiff' extension makes snapshots of only
-# needed files, so running the external diff program will actually be
-# pretty fast (at least faster than having to compare the entire tree).
+  # add new command called meld, runs meld (no need to name twice)
+  meld =
+
+  # add new command called vimdiff, runs gvimdiff with DirDiff plugin
+  # (see http://www.vim.org/scripts/script.php?script_id=102)
+  # Non-English users: be sure to put "let g:DirDiffDynamicDiffText = 1"
+  # in your .vimrc
+  vimdiff = gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)'
+
+You can use -I/-X and lists of file or directory names as with the
+normal "hg diff" command.  The `extdiff' extension makes snapshots of only
+needed files, so running the external diff program will actually be
+pretty fast (at least faster than having to compare the entire tree).
+'''
 
 from mercurial.i18n import _
 from mercurial.node import *
-from mercurial import cmdutil, util
-import os, shutil, tempfile
+from mercurial import cmdutil, util, commands
+import os, shlex, shutil, tempfile
+
+def snapshot_node(ui, repo, files, node, tmproot):
+    '''snapshot files as of some revision'''
+    mf = repo.changectx(node).manifest()
+    dirname = os.path.basename(repo.root)
+    if dirname == "":
+        dirname = "root"
+    dirname = '%s.%s' % (dirname, short(node))
+    base = os.path.join(tmproot, dirname)
+    os.mkdir(base)
+    ui.note(_('making snapshot of %d files from rev %s\n') %
+            (len(files), short(node)))
+    for fn in files:
+        if not fn in mf:
+            # skipping new file after a merge?
+            continue
+        wfn = util.pconvert(fn)
+        ui.note('  %s\n' % wfn)
+        dest = os.path.join(base, wfn)
+        destdir = os.path.dirname(dest)
+        if not os.path.isdir(destdir):
+            os.makedirs(destdir)
+        data = repo.wwritedata(wfn, repo.file(wfn).read(mf[wfn]))
+        open(dest, 'wb').write(data)
+    return dirname
+
+
+def snapshot_wdir(ui, repo, files, tmproot):
+    '''snapshot files from the working directory.
+    Without a snapshot, -I/-X do not work and recursive diff in
+    tools like kdiff3 and meld displays too many files.'''
+    dirname = os.path.basename(repo.root)
+    if dirname == "":
+        dirname = "root"
+    base = os.path.join(tmproot, dirname)
+    os.mkdir(base)
+    ui.note(_('making snapshot of %d files from working dir\n') %
+            (len(files)))
+    for fn in files:
+        wfn = util.pconvert(fn)
+        ui.note('  %s\n' % wfn)
+        dest = os.path.join(base, wfn)
+        destdir = os.path.dirname(dest)
+        if not os.path.isdir(destdir):
+            os.makedirs(destdir)
+        fp = open(dest, 'wb')
+        for chunk in util.filechunkiter(repo.wopener(wfn)):
+            fp.write(chunk)
+    return dirname
+
 
 def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
-    def snapshot_node(files, node):
-        '''snapshot files as of some revision'''
-        mf = repo.changectx(node).manifest()
-        dirname = os.path.basename(repo.root)
-        if dirname == "":
-            dirname = "root"
-        dirname = '%s.%s' % (dirname, short(node))
-        base = os.path.join(tmproot, dirname)
-        os.mkdir(base)
-        if not ui.quiet:
-            ui.write_err(_('making snapshot of %d files from rev %s\n') %
-                         (len(files), short(node)))
-        for fn in files:
-            if not fn in mf:
-                # skipping new file after a merge ?
-                continue
-            wfn = util.pconvert(fn)
-            ui.note('  %s\n' % wfn)
-            dest = os.path.join(base, wfn)
-            destdir = os.path.dirname(dest)
-            if not os.path.isdir(destdir):
-                os.makedirs(destdir)
-            data = repo.wwritedata(wfn, repo.file(wfn).read(mf[wfn]))
-            open(dest, 'wb').write(data)
-        return dirname
-
-    def snapshot_wdir(files):
-        '''snapshot files from working directory.
-        if not using snapshot, -I/-X does not work and recursive diff
-        in tools like kdiff3 and meld displays too many files.'''
-        dirname = os.path.basename(repo.root)
-        if dirname == "":
-            dirname = "root"
-        base = os.path.join(tmproot, dirname)
-        os.mkdir(base)
-        if not ui.quiet:
-            ui.write_err(_('making snapshot of %d files from working dir\n') %
-                         (len(files)))
-        for fn in files:
-            wfn = util.pconvert(fn)
-            ui.note('  %s\n' % wfn)
-            dest = os.path.join(base, wfn)
-            destdir = os.path.dirname(dest)
-            if not os.path.isdir(destdir):
-                os.makedirs(destdir)
-            fp = open(dest, 'wb')
-            for chunk in util.filechunkiter(repo.wopener(wfn)):
-                fp.write(chunk)
-        return dirname
-
     node1, node2 = cmdutil.revpair(repo, opts['rev'])
     files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
     modified, added, removed, deleted, unknown = repo.status(
@@ -112,12 +109,34 @@
         return 0
 
     tmproot = tempfile.mkdtemp(prefix='extdiff.')
+    dir2root = ''
     try:
-        dir1 = snapshot_node(modified + removed, node1)
+        # Always make a copy of node1
+        dir1 = snapshot_node(ui, repo, modified + removed, node1, tmproot)
+        changes = len(modified) + len(removed) + len(added)
+
+        # If node2 is not the wc or there is >1 change, copy it
         if node2:
-            dir2 = snapshot_node(modified + added, node2)
+            dir2 = snapshot_node(ui, repo, modified + added, node2, tmproot)
+        elif changes > 1:
+            dir2 = snapshot_wdir(ui, repo, modified + added, tmproot)
         else:
-            dir2 = snapshot_wdir(modified + added)
+            # This lets the diff tool open the changed file directly
+            dir2 = ''
+            dir2root = repo.root
+
+        # If only one change, diff the files instead of the directories
+        if changes == 1:
+            if len(modified):
+                dir1 = os.path.join(dir1, util.localpath(modified[0]))
+                dir2 = os.path.join(dir2root, dir2, util.localpath(modified[0]))
+            elif len(removed):
+                dir1 = os.path.join(dir1, util.localpath(removed[0]))
+                dir2 = os.devnull
+            else:
+                dir1 = os.devnull
+                dir2 = os.path.join(dir2root, dir2, util.localpath(added[0]))
+
         cmdline = ('%s %s %s %s' %
                    (util.shellquote(diffcmd), ' '.join(diffopts),
                     util.shellquote(dir1), util.shellquote(dir2)))
@@ -158,33 +177,41 @@
      [('p', 'program', '', _('comparison program to run')),
       ('o', 'option', [], _('pass option to comparison program')),
       ('r', 'rev', [], _('revision')),
-      ('I', 'include', [], _('include names matching the given patterns')),
-      ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+     ] + commands.walkopts,
      _('hg extdiff [OPT]... [FILE]...')),
     }
 
 def uisetup(ui):
     for cmd, path in ui.configitems('extdiff'):
-        if not cmd.startswith('cmd.'): continue
-        cmd = cmd[4:]
-        if not path: path = cmd
-        diffopts = ui.config('extdiff', 'opts.' + cmd, '')
-        diffopts = diffopts and [diffopts] or []
+        if cmd.startswith('cmd.'):
+            cmd = cmd[4:]
+            if not path: path = cmd
+            diffopts = ui.config('extdiff', 'opts.' + cmd, '')
+            diffopts = diffopts and [diffopts] or []
+        elif cmd.startswith('opts.'):
+            continue
+        else:
+            # command = path opts
+            if path:
+                diffopts = shlex.split(path)
+                path = diffopts.pop(0)
+            else:
+                path, diffopts = cmd, []
         def save(cmd, path, diffopts):
             '''use closure to save diff command to use'''
             def mydiff(ui, repo, *pats, **opts):
                 return dodiff(ui, repo, path, diffopts, pats, opts)
-            mydiff.__doc__ = '''use %(path)r to diff repository (or selected files)
+            mydiff.__doc__ = '''use %(path)s to diff repository (or selected files)
 
             Show differences between revisions for the specified
-            files, using the %(path)r program.
+            files, using the %(path)s program.
 
             When two revision arguments are given, then changes are
             shown between those revisions. If only one revision is
             specified then that revision is compared to the working
             directory, and, when no revisions are specified, the
             working directory files are compared to its parent.''' % {
-                'path': path,
+                'path': util.uirepr(path),
                 }
             return mydiff
         cmdtable[cmd] = (save(cmd, path, diffopts),
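
The reworked uisetup above still honours the old cmd.xxx/opts.xxx pairs, but also accepts the one-line form shown in the docstring, such as 'cdiff = gdiff -Nprc5', splitting it with shlex. A minimal sketch of just that splitting step (parse_diff_command is an illustrative helper, not part of the extension):

    import shlex

    def parse_diff_command(name, value):
        """Split a one-line [extdiff] entry into (program, options).

        'cdiff' with value 'gdiff -Nprc5' -> ('gdiff', ['-Nprc5'])
        'meld' with an empty value        -> ('meld', [])
        """
        if value:
            opts = shlex.split(value)
            program = opts.pop(0)
        else:
            # no value: the command name doubles as the program name
            program, opts = name, []
        return program, opts
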
--- a/hgext/fetch.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/fetch.py	Wed Feb 06 19:57:52 2008 -0800
@@ -43,7 +43,8 @@
         if not err:
             mod, add, rem = repo.status()[:3]
             message = (cmdutil.logmessage(opts) or
-                       (_('Automated merge with %s') % other.url()))
+                       (_('Automated merge with %s') %
+                        util.removeauth(other.url())))
             n = repo.commit(mod + add + rem, message,
                             opts['user'], opts['date'],
                             force_editor=opts.get('force_editor'))
@@ -54,7 +55,8 @@
         cmdutil.setremoteconfig(ui, opts)
 
         other = hg.repository(ui, ui.expandpath(source))
-        ui.status(_('pulling from %s\n') % ui.expandpath(source))
+        ui.status(_('pulling from %s\n') %
+                  util.hidepassword(ui.expandpath(source)))
         revs = None
         if opts['rev'] and not other.local():
             raise util.Abort(_("fetch -r doesn't work for remote repositories yet"))
@@ -86,13 +88,8 @@
 cmdtable = {
     'fetch':
         (fetch,
-        [('e', 'ssh', '', _('specify ssh command to use')),
-         ('m', 'message', '', _('use <text> as commit message')),
-         ('l', 'logfile', '', _('read the commit message from <file>')),
-         ('d', 'date', '', _('record datecode as commit date')),
-         ('u', 'user', '', _('record user as commiter')),
-         ('r', 'rev', [], _('a specific revision you would like to pull')),
+        [('r', 'rev', [], _('a specific revision you would like to pull')),
          ('f', 'force-editor', None, _('edit commit message')),
-         ('', 'remotecmd', '', _('hg command to run on the remote side'))],
+        ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
         _('hg fetch [SOURCE]')),
 }
--- a/hgext/gpg.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/gpg.py	Wed Feb 06 19:57:52 2008 -0800
@@ -6,7 +6,7 @@
 # of the GNU General Public License, incorporated herein by reference.
 
 import os, tempfile, binascii
-from mercurial import util
+from mercurial import util, commands
 from mercurial import node as hgnode
 from mercurial.i18n import _
 
@@ -249,7 +249,7 @@
     message = opts['message']
     if not message:
         message = "\n".join([_("Added signature for changeset %s")
-                             % hgnode.hex(n)
+                             % hgnode.short(n)
                              for n in nodes])
     try:
         repo.commit([".hgsigs"], message, opts['user'], opts['date'])
@@ -269,10 +269,9 @@
          [('l', 'local', None, _('make the signature local')),
           ('f', 'force', None, _('sign even if the sigfile is modified')),
           ('', 'no-commit', None, _('do not commit the sigfile after signing')),
+          ('k', 'key', '', _('the key id to sign with')),
           ('m', 'message', '', _('commit message')),
-          ('d', 'date', '', _('date code')),
-          ('u', 'user', '', _('user')),
-          ('k', 'key', '', _('the key id to sign with'))],
+         ] + commands.commitopts2,
          _('hg sign [OPTION]... [REVISION]...')),
     "sigcheck": (check, [], _('hg sigcheck REVISION')),
     "sigs": (sigs, [], _('hg sigs')),
--- a/hgext/graphlog.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/graphlog.py	Wed Feb 06 19:57:52 2008 -0800
@@ -5,11 +5,12 @@
 # This software may be used and distributed according to the terms of
 # the GNU General Public License, incorporated herein by reference.
 
+import os
 import sys
 from mercurial.cmdutil import revrange, show_changeset
 from mercurial.i18n import _
 from mercurial.node import nullid, nullrev
-from mercurial.util import Abort
+from mercurial.util import Abort, canonpath
 
 def revision_grapher(repo, start_rev, stop_rev):
     """incremental revision grapher
@@ -63,6 +64,62 @@
         revs = next_revs
         curr_rev -= 1
 
+def filelog_grapher(repo, path, start_rev, stop_rev):
+    """incremental file log grapher
+
+    This generator function walks through the revision history of a
+    single file from revision start_rev to revision stop_rev (which must
+    be less than or equal to start_rev) and for each revision emits
+    tuples with the following elements:
+
+      - Current revision.
+      - Current node.
+      - Column of the current node in the set of ongoing edges.
+      - Edges; a list of (col, next_col) indicating the edges between
+        the current node and its parents.
+      - Number of columns (ongoing edges) in the current revision.
+      - The difference between the number of columns (ongoing edges)
+        in the next revision and the number of columns (ongoing edges)
+        in the current revision. That is: -1 means one column removed;
+        0 means no columns added or removed; 1 means one column added.
+    """
+
+    assert start_rev >= stop_rev
+    curr_rev = start_rev
+    revs = []
+    filerev = repo.file(path).count() - 1
+    while filerev >= 0:
+        fctx = repo.filectx(path, fileid=filerev)
+
+        # Compute revs and next_revs.
+        if filerev not in revs:
+            revs.append(filerev)
+        rev_index = revs.index(filerev)
+        next_revs = revs[:]
+
+        # Add parents to next_revs.
+        parents = [f.filerev() for f in fctx.parents() if f.path() == path]
+        parents_to_add = []
+        for parent in parents:
+            if parent not in next_revs:
+                parents_to_add.append(parent)
+        parents_to_add.sort()
+        next_revs[rev_index:rev_index + 1] = parents_to_add
+
+        edges = []
+        for parent in parents:
+            edges.append((rev_index, next_revs.index(parent)))
+
+        changerev = fctx.linkrev()
+        if changerev <= start_rev:
+            node = repo.changelog.node(changerev)
+            n_columns_diff = len(next_revs) - len(revs)
+            yield (changerev, node, rev_index, edges, len(revs), n_columns_diff)
+        if changerev <= stop_rev:
+            break
+        revs = next_revs
+        filerev -= 1
+
 def get_rev_parents(repo, rev):
     return [x for x in repo.changelog.parentrevs(rev) if x != nullrev]
 
@@ -141,7 +198,7 @@
     else:
         return (repo.changelog.count() - 1, 0)
 
-def graphlog(ui, repo, **opts):
+def graphlog(ui, repo, path=None, **opts):
     """show revision history alongside an ASCII revision graph
 
     Print a revision history alongside a revision graph drawn with
@@ -157,7 +214,11 @@
     if start_rev == nullrev:
         return
     cs_printer = show_changeset(ui, repo, opts)
-    grapher = revision_grapher(repo, start_rev, stop_rev)
+    if path:
+        cpath = canonpath(repo.root, os.getcwd(), path)
+        grapher = filelog_grapher(repo, cpath, start_rev, stop_rev)
+    else:
+        grapher = revision_grapher(repo, start_rev, stop_rev)
     repo_parents = repo.dirstate.parents()
     prev_n_columns_diff = 0
     prev_node_index = 0
@@ -261,5 +322,5 @@
           ('r', 'rev', [], _('show the specified revision or range')),
           ('', 'style', '', _('display using template map file')),
           ('', 'template', '', _('display with template'))],
-         _('hg glog [OPTION]...')),
+         _('hg glog [OPTION]... [FILE]')),
 }
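
The filelog_grapher docstring above spells out the six-element tuples the generator yields. A minimal usage sketch, assuming an already-open repository object is passed in and that the generator can be imported from hgext.graphlog (dump_file_graph is illustrative only):

    from hgext.graphlog import filelog_grapher

    def dump_file_graph(repo, path, start_rev, stop_rev):
        # repo: an open repository object; path: a tracked file
        grapher = filelog_grapher(repo, path, start_rev, stop_rev)
        for rev, node, col, edges, ncols, ncols_diff in grapher:
            print('rev %d: column %d of %d, edges %r'
                  % (rev, col, ncols, edges))
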
--- a/hgext/hbisect.py	Thu Jul 26 07:56:27 2007 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,312 +0,0 @@
-# bisect extension for mercurial
-#
-# Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
-# Inspired by git bisect, extension skeleton taken from mq.py.
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-from mercurial.i18n import _
-from mercurial import hg, util, commands, cmdutil
-import os, sys, sets
-
-versionstr = "0.0.3"
-
-def lookup_rev(ui, repo, rev=None):
-    """returns rev or the checked-out revision if rev is None"""
-    if not rev is None:
-        return repo.lookup(rev)
-    parents = [p for p in repo.dirstate.parents() if p != hg.nullid]
-    if len(parents) != 1:
-        raise util.Abort(_("unexpected number of parents, "
-                           "please commit or revert"))
-    return parents.pop()
-
-def check_clean(ui, repo):
-    modified, added, removed, deleted, unknown = repo.status()[:5]
-    if modified or added or removed:
-        ui.warn("Repository is not clean, please commit or revert\n")
-        sys.exit(1)
-
-class bisect(object):
-    """dichotomic search in the DAG of changesets"""
-    def __init__(self, ui, repo):
-        self.repo = repo
-        self.path = repo.join("bisect")
-        self.opener = util.opener(self.path)
-        self.ui = ui
-        self.goodrevs = []
-        self.badrev = None
-        self.good_dirty = 0
-        self.bad_dirty = 0
-        self.good_path = "good"
-        self.bad_path = "bad"
-
-        if os.path.exists(os.path.join(self.path, self.good_path)):
-            self.goodrevs = self.opener(self.good_path).read().splitlines()
-            self.goodrevs = [hg.bin(x) for x in self.goodrevs]
-        if os.path.exists(os.path.join(self.path, self.bad_path)):
-            r = self.opener(self.bad_path).read().splitlines()
-            if r:
-                self.badrev = hg.bin(r.pop(0))
-
-    def write(self):
-        if not os.path.isdir(self.path):
-            return
-        f = self.opener(self.good_path, "w")
-        f.write("\n".join([hg.hex(r) for r in  self.goodrevs]))
-        if len(self.goodrevs) > 0:
-            f.write("\n")
-        f = self.opener(self.bad_path, "w")
-        if self.badrev:
-            f.write(hg.hex(self.badrev) + "\n")
-
-    def init(self):
-        """start a new bisection"""
-        if os.path.isdir(self.path):
-            raise util.Abort(_("bisect directory already exists\n"))
-        os.mkdir(self.path)
-        check_clean(self.ui, self.repo)
-        return 0
-
-    def reset(self):
-        """finish a bisection"""
-        if os.path.isdir(self.path):
-            sl = [os.path.join(self.path, p)
-                  for p in [self.bad_path, self.good_path]]
-            for s in sl:
-                if os.path.exists(s):
-                    os.unlink(s)
-            os.rmdir(self.path)
-        # Not sure about this
-        #self.ui.write("Going back to tip\n")
-        #self.repo.update(self.repo.changelog.tip())
-        return 1
-
-    def num_ancestors(self, head=None, stop=None):
-        """
-        returns a dict with the mapping:
-        node -> number of ancestors (self included)
-        for all nodes who are ancestor of head and
-        not in stop.
-        """
-        if head is None:
-            head = self.badrev
-        return self.__ancestors_and_nb_ancestors(head, stop)[1]
-
-    def ancestors(self, head=None, stop=None):
-        """
-        returns the set of the ancestors of head (self included)
-        who are not in stop.
-        """
-        if head is None:
-            head = self.badrev
-        return self.__ancestors_and_nb_ancestors(head, stop)[0]
-
-    def __ancestors_and_nb_ancestors(self, head, stop=None):
-        """
-        if stop is None then ancestors of goodrevs are used as
-        lower limit.
-
-        returns (anc, n_child) where anc is the set of the ancestors of head
-        and n_child is a dictionary with the following mapping:
-        node -> number of ancestors (self included)
-        """
-        cl = self.repo.changelog
-        if not stop:
-            stop = sets.Set([])
-            for i in xrange(len(self.goodrevs)-1, -1, -1):
-                g = self.goodrevs[i]
-                if g in stop:
-                    continue
-                stop.update(cl.reachable(g))
-        def num_children(a):
-            """
-            returns a dictionnary with the following mapping
-            node -> [number of children, empty set]
-            """
-            d = {a: [0, sets.Set([])]}
-            for i in xrange(cl.rev(a)+1):
-                n = cl.node(i)
-                if not d.has_key(n):
-                    d[n] = [0, sets.Set([])]
-                parents = [p for p in cl.parents(n) if p != hg.nullid]
-                for p in parents:
-                    d[p][0] += 1
-            return d
-
-        if head in stop:
-            raise util.Abort(_("Inconsistent state, %s:%s is good and bad")
-                             % (cl.rev(head), hg.short(head)))
-        n_child = num_children(head)
-        for i in xrange(cl.rev(head)+1):
-            n = cl.node(i)
-            parents = [p for p in cl.parents(n) if p != hg.nullid]
-            for p in parents:
-                n_child[p][0] -= 1
-                if not n in stop:
-                    n_child[n][1].union_update(n_child[p][1])
-                if n_child[p][0] == 0:
-                    n_child[p] = len(n_child[p][1])
-            if not n in stop:
-                n_child[n][1].add(n)
-            if n_child[n][0] == 0:
-                if n == head:
-                    anc = n_child[n][1]
-                n_child[n] = len(n_child[n][1])
-        return anc, n_child
-
-    def next(self):
-        if not self.badrev:
-            raise util.Abort(_("You should give at least one bad revision"))
-        if not self.goodrevs:
-            self.ui.warn(_("No good revision given\n"))
-            self.ui.warn(_("Marking the first revision as good\n"))
-        ancestors, num_ancestors = self.__ancestors_and_nb_ancestors(
-                                    self.badrev)
-        tot = len(ancestors)
-        if tot == 1:
-            if ancestors.pop() != self.badrev:
-                raise util.Abort(_("Could not find the first bad revision"))
-            self.ui.write(_("The first bad revision is:\n"))
-            displayer = cmdutil.show_changeset(self.ui, self.repo, {})
-            displayer.show(changenode=self.badrev)
-            return None
-        best_rev = None
-        best_len = -1
-        for n in ancestors:
-            l = num_ancestors[n]
-            l = min(l, tot - l)
-            if l > best_len:
-                best_len = l
-                best_rev = n
-        assert best_rev is not None
-        nb_tests = 0
-        q, r = divmod(tot, 2)
-        while q:
-            nb_tests += 1
-            q, r = divmod(q, 2)
-        msg = _("Testing changeset %s:%s (%s changesets remaining, "
-                "~%s tests)\n") % (self.repo.changelog.rev(best_rev),
-                                   hg.short(best_rev), tot, nb_tests)
-        self.ui.write(msg)
-        return best_rev
-
-    def autonext(self):
-        """find and update to the next revision to test"""
-        check_clean(self.ui, self.repo)
-        rev = self.next()
-        if rev is not None:
-            return hg.clean(self.repo, rev)
-
-    def good(self, rev):
-        self.goodrevs.append(rev)
-
-    def autogood(self, rev=None):
-        """mark revision as good and update to the next revision to test"""
-        check_clean(self.ui, self.repo)
-        rev = lookup_rev(self.ui, self.repo, rev)
-        self.good(rev)
-        if self.badrev:
-            return self.autonext()
-
-    def bad(self, rev):
-        self.badrev = rev
-
-    def autobad(self, rev=None):
-        """mark revision as bad and update to the next revision to test"""
-        check_clean(self.ui, self.repo)
-        rev = lookup_rev(self.ui, self.repo, rev)
-        self.bad(rev)
-        if self.goodrevs:
-            self.autonext()
-
-# should we put it in the class ?
-def test(ui, repo, rev):
-    """test the bisection code"""
-    b = bisect(ui, repo)
-    rev = repo.lookup(rev)
-    ui.write("testing with rev %s\n" % hg.hex(rev))
-    anc = b.ancestors()
-    while len(anc) > 1:
-        if not rev in anc:
-            ui.warn("failure while bisecting\n")
-            sys.exit(1)
-        ui.write("it worked :)\n")
-        new_rev = b.next()
-        ui.write("choosing if good or bad\n")
-        if rev in b.ancestors(head=new_rev):
-            b.bad(new_rev)
-            ui.write("it is bad\n")
-        else:
-            b.good(new_rev)
-            ui.write("it is good\n")
-        anc = b.ancestors()
-        #repo.update(new_rev, force=True)
-    for v in anc:
-        if v != rev:
-            ui.warn("fail to found cset! :(\n")
-            return 1
-    ui.write("Found bad cset: %s\n" % hg.hex(b.badrev))
-    ui.write("Everything is ok :)\n")
-    return 0
-
-def bisect_run(ui, repo, cmd=None, *args):
-    """Dichotomic search in the DAG of changesets
-
-This extension helps to find changesets which cause problems.
-To use, mark the earliest changeset you know introduces the problem
-as bad, then mark the latest changeset which is free from the problem
-as good. Bisect will update your working directory to a revision for
-testing. Once you have performed tests, mark the working directory
-as bad or good and bisect will either update to another candidate
-changeset or announce that it has found the bad revision.
-
-Note: bisect expects bad revisions to be descendants of good revisions.
-If you are looking for the point at which a problem was fixed, then make
-the problem-free state "bad" and the problematic state "good."
-
-For subcommands see "hg bisect help\"
-    """
-    def help_(cmd=None, *args):
-        """show help for a given bisect subcommand or all subcommands"""
-        cmdtable = bisectcmdtable
-        if cmd:
-            doc = cmdtable[cmd][0].__doc__
-            synopsis = cmdtable[cmd][2]
-            ui.write(synopsis + "\n")
-            ui.write("\n" + doc + "\n")
-            return
-        ui.write(_("list of subcommands for the bisect extension\n\n"))
-        cmds = cmdtable.keys()
-        cmds.sort()
-        m = max([len(c) for c in cmds])
-        for cmd in cmds:
-            doc = cmdtable[cmd][0].__doc__.splitlines(0)[0].rstrip()
-            ui.write(" %-*s   %s\n" % (m, cmd, doc))
-
-    b = bisect(ui, repo)
-    bisectcmdtable = {
-        "init": (b.init, 0, _("hg bisect init")),
-        "bad": (b.autobad, 1, _("hg bisect bad [<rev>]")),
-        "good": (b.autogood, 1, _("hg bisect good [<rev>]")),
-        "next": (b.autonext, 0, _("hg bisect next")),
-        "reset": (b.reset, 0, _("hg bisect reset")),
-        "help": (help_, 1, _("hg bisect help [<subcommand>]")),
-    }
-
-    if not bisectcmdtable.has_key(cmd):
-        ui.warn(_("bisect: Unknown sub-command\n"))
-        return help_()
-    if len(args) > bisectcmdtable[cmd][1]:
-        ui.warn(_("bisect: Too many arguments\n"))
-        return help_()
-    try:
-        return bisectcmdtable[cmd][0](*args)
-    finally:
-        b.write()
-
-cmdtable = {
-    "bisect": (bisect_run, [], _("hg bisect [help|init|reset|next|good|bad]")),
-    #"bisect-test": (test, [], "hg bisect-test rev"),
-}
--- a/hgext/hgk.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/hgk.py	Wed Feb 06 19:57:52 2008 -0800
@@ -4,8 +4,48 @@
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
+#
+# The hgk extension allows browsing the history of a repository in a
+# graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is
+# not distributed with Mercurial.)
+#
+# hgk consists of two parts: a Tcl script that does the displaying and
+# querying of information, and an extension to mercurial named hgk.py,
+# which provides hooks for hgk to get information. hgk can be found in
+# the contrib directory, and hgk.py can be found in the hgext
+# directory.
+#
+# To load the hgk.py extension, add it to your .hgrc file (you have
+# to use your global $HOME/.hgrc file, not one in a repository). You
+# can specify an absolute path:
+#
+#   [extensions]
+#   hgk=/usr/local/lib/hgk.py
+#
+# Mercurial can also scan the default python library path for a file
+# named 'hgk.py' if you set hgk empty:
+#
+#   [extensions]
+#   hgk=
+#
+# The hg view command will launch the hgk Tcl script. For this command
+# to work, hgk must be in your search path. Alternately, you can
+# specify the path to hgk in your .hgrc file:
+#
+#   [hgk]
+#   path=/location/of/hgk
+#
+# hgk can make use of the extdiff extension to visualize
+# revisions. Assuming you have already configured the extdiff vdiff
+# command, just add:
+#
+#   [hgk]
+#   vdiff=vdiff
+#
+# The revisions context menu will now display additional entries to fire
+# vdiff on hovered and selected revisions.
 
-import sys, os
+import os
 from mercurial import hg, fancyopts, commands, ui, util, patch, revlog
 
 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
@@ -21,17 +61,14 @@
 
         for f in modified:
             # TODO get file permissions
-            print ":100664 100664 %s %s M\t%s\t%s" % (hg.short(mmap[f]),
-                                                      hg.short(mmap2[f]),
-                                                      f, f)
+            ui.write(":100664 100664 %s %s M\t%s\t%s\n" %
+                     (hg.short(mmap[f]), hg.short(mmap2[f]), f, f))
         for f in added:
-            print ":000000 100664 %s %s N\t%s\t%s" % (empty,
-                                                      hg.short(mmap2[f]),
-                                                      f, f)
+            ui.write(":000000 100664 %s %s N\t%s\t%s\n" %
+                     (empty, hg.short(mmap2[f]), f, f))
         for f in removed:
-            print ":100664 000000 %s %s D\t%s\t%s" % (hg.short(mmap[f]),
-                                                      empty,
-                                                      f, f)
+            ui.write(":100664 000000 %s %s D\t%s\t%s\n" %
+                     (hg.short(mmap[f]), empty, f, f))
     ##
 
     while True:
@@ -53,7 +90,7 @@
             node1 = repo.changelog.parents(node1)[0]
         if opts['patch']:
             if opts['pretty']:
-                catcommit(repo, node2, "")
+                catcommit(ui, repo, node2, "")
             patch.diff(repo, node1, node2,
                        files=files,
                        opts=patch.diffopts(ui, {'git': True}))
@@ -62,14 +99,14 @@
         if not opts['stdin']:
             break
 
-def catcommit(repo, n, prefix, ctx=None):
+def catcommit(ui, repo, n, prefix, ctx=None):
     nlprefix = '\n' + prefix;
     if ctx is None:
         ctx = repo.changectx(n)
     (p1, p2) = ctx.parents()
-    print "tree %s" % (hg.short(ctx.changeset()[0])) # use ctx.node() instead ??
-    if p1: print "parent %s" % (hg.short(p1.node()))
-    if p2: print "parent %s" % (hg.short(p2.node()))
+    ui.write("tree %s\n" % hg.short(ctx.changeset()[0])) # use ctx.node() instead ??
+    if p1: ui.write("parent %s\n" % hg.short(p1.node()))
+    if p2: ui.write("parent %s\n" % hg.short(p2.node()))
     date = ctx.date()
     description = ctx.description().replace("\0", "")
     lines = description.splitlines()
@@ -78,23 +115,24 @@
     else:
         committer = ctx.user()
 
-    print "author %s %s %s" % (ctx.user(), int(date[0]), date[1])
-    print "committer %s %s %s" % (committer, int(date[0]), date[1])
-    print "revision %d" % ctx.rev()
-    print ""
+    ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1]))
+    ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1]))
+    ui.write("revision %d\n" % ctx.rev())
+    ui.write("branch %s\n\n" % ctx.branch())
+
     if prefix != "":
-        print "%s%s" % (prefix, description.replace('\n', nlprefix).strip())
+        ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip()))
     else:
-        print description
+        ui.write(description + "\n")
     if prefix:
-        sys.stdout.write('\0')
+        ui.write('\0')
 
 def base(ui, repo, node1, node2):
     """Output common ancestor information"""
     node1 = repo.lookup(node1)
     node2 = repo.lookup(node2)
     n = repo.changelog.ancestor(node1, node2)
-    print hg.short(n)
+    ui.write(hg.short(n) + "\n")
 
 def catfile(ui, repo, type=None, r=None, **opts):
     """cat a specific revision"""
@@ -117,10 +155,10 @@
 
     while r:
         if type != "commit":
-            sys.stderr.write("aborting hg cat-file only understands commits\n")
-            sys.exit(1);
+            ui.warn("aborting hg cat-file only understands commits\n")
+            return 1;
         n = repo.lookup(r)
-        catcommit(repo, n, prefix)
+        catcommit(ui, repo, n, prefix)
         if opts['stdin']:
             try:
                 (type, r) = raw_input().split(' ');
@@ -134,7 +172,7 @@
 # telling you which commits are reachable from the supplied ones via
 # a bitmask based on arg position.
 # you can specify a commit to stop at by starting the sha1 with ^
-def revtree(args, repo, full="tree", maxnr=0, parents=False):
+def revtree(ui, args, repo, full="tree", maxnr=0, parents=False):
     def chlogwalk():
         count = repo.changelog.count()
         i = count
@@ -219,24 +257,24 @@
                 if pp[1] != hg.nullid:
                     parentstr += " " + hg.short(pp[1])
             if not full:
-                print hg.short(n) + parentstr
+                ui.write("%s%s\n" % (hg.short(n), parentstr))
             elif full == "commit":
-                print hg.short(n) + parentstr
-                catcommit(repo, n, '    ', ctx)
+                ui.write("%s%s\n" % (hg.short(n), parentstr))
+                catcommit(ui, repo, n, '    ', ctx)
             else:
                 (p1, p2) = repo.changelog.parents(n)
                 (h, h1, h2) = map(hg.short, (n, p1, p2))
                 (i1, i2) = map(repo.changelog.rev, (p1, p2))
 
                 date = ctx.date()[0]
-                print "%s %s:%s" % (date, h, mask),
+                ui.write("%s %s:%s" % (date, h, mask))
                 mask = is_reachable(want_sha1, reachable, p1)
                 if i1 != hg.nullrev and mask > 0:
-                    print "%s:%s " % (h1, mask),
+                    ui.write("%s:%s " % (h1, mask)),
                 mask = is_reachable(want_sha1, reachable, p2)
                 if i2 != hg.nullrev and mask > 0:
-                    print "%s:%s " % (h2, mask),
-                print ""
+                    ui.write("%s:%s " % (h2, mask))
+                ui.write("\n")
             if maxnr and count >= maxnr:
                 break
             count += 1
@@ -264,7 +302,15 @@
     else:
         full = None
     copy = [x for x in revs]
-    revtree(copy, repo, full, opts['max_count'], opts['parents'])
+    revtree(ui, copy, repo, full, opts['max_count'], opts['parents'])
+
+def config(ui, repo, **opts):
+    """print extension options"""
+    def writeopt(name, value):
+        ui.write('k=%s\nv=%s\n' % (name, value))
+
+    writeopt('vdiff', ui.config('hgk', 'vdiff', ''))
+
 
 def view(ui, repo, *etc, **opts):
     "start interactive history viewer"
@@ -292,6 +338,8 @@
         (catfile,
          [('s', 'stdin', None, 'stdin')],
          'hg debug-cat-file [OPTION]... TYPE FILE'),
+    "debug-config":
+        (config, [], 'hg debug-config'),
     "debug-merge-base":
         (base, [], 'hg debug-merge-base node node'),
     "debug-rev-parse":
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/highlight.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,139 @@
+"""
+This is a Mercurial extension for syntax highlighting in the file
+revision view of hgweb.
+
+It depends on the pygments syntax highlighting library:
+http://pygments.org/
+
+To enable the extension add this to hgrc:
+
+[extensions]
+hgext.highlight =
+
+There is a single configuration option:
+
+[web]
+pygments_style = <style>
+
+The default is 'colorful'.  If this is changed, the corresponding CSS
+file should be regenerated by running
+
+# pygmentize -f html -S <newstyle>
+
+
+-- Adam Hupp <adam@hupp.org>
+
+
+"""
+
+from mercurial import demandimport
+demandimport.ignore.extend(['pkgutil',
+                            'pkg_resources',
+                            '__main__',])
+
+import mimetypes
+
+from mercurial.hgweb import hgweb_mod
+from mercurial.hgweb.hgweb_mod import hgweb
+from mercurial import util
+from mercurial.hgweb.common import paritygen
+from mercurial.node import hex
+
+from pygments import highlight
+from pygments.util import ClassNotFound
+from pygments.lexers import guess_lexer_for_filename, TextLexer
+from pygments.formatters import HtmlFormatter
+
+SYNTAX_CSS = ('\n<link rel="stylesheet" href="#staticurl#highlight.css" '
+              'type="text/css" />')
+
+class StripedHtmlFormatter(HtmlFormatter):
+    def __init__(self, stripecount, *args, **kwargs):
+        super(StripedHtmlFormatter, self).__init__(*args, **kwargs)
+        self.stripecount = stripecount
+
+    def wrap(self, source, outfile):
+        yield 0, "<div class='highlight'>"
+        yield 0, "<pre>"
+        parity = paritygen(self.stripecount)
+
+        for n, i in source:
+            if n == 1:
+                i = "<div class='parity%s'>%s</div>" % (parity.next(), i)
+            yield n, i
+
+        yield 0, "</pre>"
+        yield 0, "</div>"
+
+
+def pygments_format(filename, text, forcetext, stripecount, style):
+    if not forcetext:
+        try:
+            lexer = guess_lexer_for_filename(filename, text,
+                                             encoding=util._encoding)
+        except ClassNotFound:
+            lexer = TextLexer(encoding=util._encoding)
+    else:
+        lexer = TextLexer(encoding=util._encoding)
+
+    formatter = StripedHtmlFormatter(stripecount, style=style,
+                                     linenos='inline', encoding=util._encoding)
+
+    return highlight(text, lexer, formatter)
+
+
+def filerevision_pygments(self, tmpl, fctx):
+    """Reimplement hgweb.filerevision to use syntax highlighting"""
+    f = fctx.path()
+    text = fctx.data()
+    fl = fctx.filelog()
+    n = fctx.filenode()
+
+    if util.binary(text):
+        mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
+        text = "(binary:%s)" % mt
+        # don't parse (binary:...) as anything
+        forcetext = True
+    else:
+        # encode to hgweb.encoding for lexers and formatter
+        util._encoding = self.encoding
+        text = util.tolocal(text)
+        forcetext = False
+
+    def lines(text):
+        for line in text.splitlines(True):
+            yield {"line": line}
+
+    style = self.config("web", "pygments_style", "colorful")
+
+    text_formatted = lines(pygments_format(f, text, forcetext,
+                                           self.stripecount, style))
+
+    # override per-line template
+    tmpl.cache['fileline'] = '#line#'
+
+    # append a <link ...> to the syntax highlighting css
+    old_header = ''.join(tmpl('header'))
+    if SYNTAX_CSS not in old_header:
+        new_header =  old_header + SYNTAX_CSS
+        tmpl.cache['header'] = new_header
+
+    return tmpl("filerevision",
+               file=f,
+               path=hgweb_mod._up(f), # fixme: make public
+               text=text_formatted,
+               rev=fctx.rev(),
+               node=hex(fctx.node()),
+               author=fctx.user(),
+               date=fctx.date(),
+               desc=fctx.description(),
+               parent=self.siblings(fctx.parents()),
+               child=self.siblings(fctx.children()),
+               rename=self.renamelink(fl, n),
+               permissions=fctx.manifest().flags(f))
+
+
+# monkeypatch in the new version
+# should be safer than overriding the method in a derived class
+# and then patching the class
+hgweb.filerevision = filerevision_pygments
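The StripedHtmlFormatter above builds on pygments' HtmlFormatter.wrap() contract: wrap() receives (is_line, text) pairs from the formatter and yields them back, optionally decorated. The following standalone sketch (not part of this changeset; ParityFormatter and the sample input are invented for illustration, and only pygments itself is assumed) shows the same line-striping idea without any hgweb machinery:

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    class ParityFormatter(HtmlFormatter):
        # keep the (source, outfile) signature used above; outfile is unused
        def wrap(self, source, outfile=None):
            yield 0, "<div class='highlight'><pre>"
            parity = 0
            for is_line, text in source:
                if is_line == 1:
                    # alternate a parity class on real source lines, the same
                    # trick StripedHtmlFormatter plays with hgweb's paritygen
                    text = "<div class='parity%d'>%s</div>" % (parity, text)
                    parity = 1 - parity
                yield is_line, text
            yield 0, "</pre></div>"

    print highlight("print 'hello'\n", PythonLexer(), ParityFormatter())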
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/imerge.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,406 @@
+# Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
+# Published under the GNU GPL
+
+'''
+imerge - interactive merge
+'''
+
+from mercurial.i18n import _
+from mercurial.node import *
+from mercurial import commands, cmdutil, dispatch, fancyopts
+from mercurial import hg, filemerge, util
+import os, tarfile
+
+class InvalidStateFileException(Exception): pass
+
+class ImergeStateFile(object):
+    def __init__(self, im):
+        self.im = im
+
+    def save(self, dest):
+        tf = tarfile.open(dest, 'w:gz')
+
+        st = os.path.join(self.im.path, 'status')
+        tf.add(st, os.path.join('.hg', 'imerge', 'status'))
+
+        for f in self.im.resolved:
+            (fd, fo) = self.im.conflicts[f]
+            abssrc = self.im.repo.wjoin(fd)
+            tf.add(abssrc, fd)
+
+        tf.close()
+
+    def load(self, source):
+        wlock = self.im.repo.wlock()
+        lock = self.im.repo.lock()
+
+        tf = tarfile.open(source, 'r')
+        contents = tf.getnames()
+        # tarfile normalizes path separators to '/'
+        statusfile = '.hg/imerge/status'
+        if statusfile not in contents:
+            raise InvalidStateFileException('no status file')
+
+        tf.extract(statusfile, self.im.repo.root)
+        p1, p2 = self.im.load()
+        if self.im.repo.dirstate.parents()[0] != p1.node():
+            hg.clean(self.im.repo, p1.node())
+        self.im.start(p2.node())
+        for tarinfo in tf:
+            tf.extract(tarinfo, self.im.repo.root)
+        self.im.load()
+
+class Imerge(object):
+    def __init__(self, ui, repo):
+        self.ui = ui
+        self.repo = repo
+
+        self.path = repo.join('imerge')
+        self.opener = util.opener(self.path)
+
+        self.wctx = self.repo.workingctx()
+        self.conflicts = {}
+        self.resolved = []
+
+    def merging(self):
+        return len(self.wctx.parents()) > 1
+
+    def load(self):
+        # status format. \0-delimited file, fields are
+        # p1, p2, conflict count, conflict filenames, resolved filenames
+        # conflict filenames are tuples of localname, remoteorig, remotenew
+
+        statusfile = self.opener('status')
+
+        status = statusfile.read().split('\0')
+        if len(status) < 3:
+            raise util.Abort('invalid imerge status file')
+
+        try:
+            parents = [self.repo.changectx(n) for n in status[:2]]
+        except LookupError:
+            raise util.Abort('merge parent %s not in repository' % n)
+
+        status = status[2:]
+        conflicts = int(status.pop(0)) * 3
+        self.resolved = status[conflicts:]
+        for i in xrange(0, conflicts, 3):
+            self.conflicts[status[i]] = (status[i+1], status[i+2])
+
+        return parents
+
+    def save(self):
+        lock = self.repo.lock()
+
+        if not os.path.isdir(self.path):
+            os.mkdir(self.path)
+        statusfile = self.opener('status', 'wb')
+
+        out = [hex(n.node()) for n in self.wctx.parents()]
+        out.append(str(len(self.conflicts)))
+        conflicts = self.conflicts.items()
+        conflicts.sort()
+        for fw, fd_fo in conflicts:
+            out.append(fw)
+            out.extend(fd_fo)
+        out.extend(self.resolved)
+
+        statusfile.write('\0'.join(out))
+
+    def remaining(self):
+        return [f for f in self.conflicts if f not in self.resolved]
+
+    def filemerge(self, fn, interactive=True):
+        wlock = self.repo.wlock()
+
+        (fd, fo) = self.conflicts[fn]
+        p1, p2 = self.wctx.parents()
+
+        # this could be greatly improved
+        realmerge = os.environ.get('HGMERGE')
+        if not interactive:
+            os.environ['HGMERGE'] = 'merge'
+
+        # The filemerge ancestor algorithm does not work if self.wctx
+        # already has two parents (in normal merge it doesn't yet). But
+        # this is very dirty.
+        self.wctx._parents.pop()
+        try:
+            # TODO: we should probably revert the file if merge fails
+            return filemerge.filemerge(self.repo, fn, fd, fo, self.wctx, p2)
+        finally:
+            self.wctx._parents.append(p2)
+            if realmerge:
+                os.environ['HGMERGE'] = realmerge
+            elif not interactive:
+                del os.environ['HGMERGE']
+
+    def start(self, rev=None):
+        _filemerge = filemerge.filemerge
+        def filemerge_(repo, fw, fd, fo, wctx, mctx):
+            self.conflicts[fw] = (fd, fo)
+
+        filemerge.filemerge = filemerge_
+        commands.merge(self.ui, self.repo, rev=rev)
+        filemerge.filemerge = _filemerge
+
+        self.wctx = self.repo.workingctx()
+        self.save()
+
+    def resume(self):
+        self.load()
+
+        dp = self.repo.dirstate.parents()
+        p1, p2 = self.wctx.parents()
+        if p1.node() != dp[0] or p2.node() != dp[1]:
+            raise util.Abort('imerge state does not match working directory')
+
+    def next(self):
+        remaining = self.remaining()
+        return remaining and remaining[0]
+
+    def resolve(self, files):
+        resolved = dict.fromkeys(self.resolved)
+        for fn in files:
+            if fn not in self.conflicts:
+                raise util.Abort('%s is not in the merge set' % fn)
+            resolved[fn] = True
+        self.resolved = resolved.keys()
+        self.resolved.sort()
+        self.save()
+        return 0
+
+    def unresolve(self, files):
+        resolved = dict.fromkeys(self.resolved)
+        for fn in files:
+            if fn not in resolved:
+                raise util.Abort('%s is not resolved' % fn)
+            del resolved[fn]
+        self.resolved = resolved.keys()
+        self.resolved.sort()
+        self.save()
+        return 0
+
+    def pickle(self, dest):
+        '''write current merge state to file to be resumed elsewhere'''
+        state = ImergeStateFile(self)
+        return state.save(dest)
+
+    def unpickle(self, source):
+        '''read merge state from file'''
+        state = ImergeStateFile(self)
+        return state.load(source)
+
+def load(im, source):
+    if im.merging():
+        raise util.Abort('there is already a merge in progress '
+                         '(update -C <rev> to abort it)' )
+    m, a, r, d =  im.repo.status()[:4]
+    if m or a or r or d:
+        raise util.Abort('working directory has uncommitted changes')
+
+    rc = im.unpickle(source)
+    if not rc:
+        status(im)
+    return rc
+
+def merge_(im, filename=None, auto=False):
+    success = True
+    if auto and not filename:
+        for fn in im.remaining():
+            rc = im.filemerge(fn, interactive=False)
+            if rc:
+                success = False
+            else:
+                im.resolve([fn])
+        if success:
+            im.ui.write('all conflicts resolved\n')
+        else:
+            status(im)
+        return 0
+
+    if not filename:
+        filename = im.next()
+        if not filename:
+            im.ui.write('all conflicts resolved\n')
+            return 0
+
+    rc = im.filemerge(filename, interactive=not auto)
+    if not rc:
+        im.resolve([filename])
+        if not im.next():
+            im.ui.write('all conflicts resolved\n')
+    return rc
+
+def next(im):
+    n = im.next()
+    if n:
+        im.ui.write('%s\n' % n)
+    else:
+        im.ui.write('all conflicts resolved\n')
+    return 0
+
+def resolve(im, *files):
+    if not files:
+        raise util.Abort('resolve requires at least one filename')
+    return im.resolve(files)
+
+def save(im, dest):
+    return im.pickle(dest)
+
+def status(im, **opts):
+    if not opts.get('resolved') and not opts.get('unresolved'):
+        opts['resolved'] = True
+        opts['unresolved'] = True
+
+    if im.ui.verbose:
+        p1, p2 = [short(p.node()) for p in im.wctx.parents()]
+        im.ui.note(_('merging %s and %s\n') % (p1, p2))
+
+    conflicts = im.conflicts.keys()
+    conflicts.sort()
+    remaining = dict.fromkeys(im.remaining())
+    st = []
+    for fn in conflicts:
+        if opts.get('no_status'):
+            mode = ''
+        elif fn in remaining:
+            mode = 'U '
+        else:
+            mode = 'R '
+        if ((opts.get('resolved') and fn not in remaining)
+            or (opts.get('unresolved') and fn in remaining)):
+            st.append((mode, fn))
+    st.sort()
+    for (mode, fn) in st:
+        if im.ui.verbose:
+            fo, fd = im.conflicts[fn]
+            if fd != fn:
+                fn = '%s (%s)' % (fn, fd)
+        im.ui.write('%s%s\n' % (mode, fn))
+    if opts.get('unresolved') and not remaining:
+        im.ui.write(_('all conflicts resolved\n'))
+
+    return 0
+
+def unresolve(im, *files):
+    if not files:
+        raise util.Abort('unresolve requires at least one filename')
+    return im.unresolve(files)
+
+subcmdtable = {
+    'load': (load, []),
+    'merge':
+        (merge_,
+         [('a', 'auto', None, _('automatically resolve if possible'))]),
+    'next': (next, []),
+    'resolve': (resolve, []),
+    'save': (save, []),
+    'status':
+        (status,
+         [('n', 'no-status', None, _('hide status prefix')),
+          ('', 'resolved', None, _('only show resolved conflicts')),
+          ('', 'unresolved', None, _('only show unresolved conflicts'))]),
+    'unresolve': (unresolve, [])
+}
+
+def dispatch_(im, args, opts):
+    def complete(s, choices):
+        candidates = []
+        for choice in choices:
+            if choice.startswith(s):
+                candidates.append(choice)
+        return candidates
+
+    c, args = args[0], list(args[1:])
+    cmd = complete(c, subcmdtable.keys())
+    if not cmd:
+        raise cmdutil.UnknownCommand('imerge ' + c)
+    if len(cmd) > 1:
+        cmd.sort()
+        raise cmdutil.AmbiguousCommand('imerge ' + c, cmd)
+    cmd = cmd[0]
+
+    func, optlist = subcmdtable[cmd]
+    opts = {}
+    try:
+        args = fancyopts.fancyopts(args, optlist, opts)
+        return func(im, *args, **opts)
+    except fancyopts.getopt.GetoptError, inst:
+        raise dispatch.ParseError('imerge', '%s: %s' % (cmd, inst))
+    except TypeError:
+        raise dispatch.ParseError('imerge', _('%s: invalid arguments') % cmd)
+
+def imerge(ui, repo, *args, **opts):
+    '''interactive merge
+
+    imerge lets you split a merge into pieces. When you start a merge
+    with imerge, the names of all files with conflicts are recorded.
+    You can then merge any of these files, and if the merge is
+    successful, they will be marked as resolved. When all files are
+    resolved, the merge is complete.
+
+    If no merge is in progress, hg imerge [rev] will merge the working
+    directory with rev (defaulting to the other head if the repository
+    only has two heads). You may also resume a saved merge with
+    hg imerge load <file>.
+
+    If a merge is in progress, hg imerge will default to merging the
+    next unresolved file.
+
+    The following subcommands are available:
+
+    status:
+      show the current state of the merge
+      options:
+        -n --no-status:  do not print the status prefix
+           --resolved:   only print resolved conflicts
+           --unresolved: only print unresolved conflicts
+    next:
+      show the next unresolved file merge
+    merge [<file>]:
+      merge <file>. If the file merge is successful, the file will be
+      recorded as resolved. If no file is given, the next unresolved
+      file will be merged.
+    resolve <file>...:
+      mark files as successfully merged
+    unresolve <file>...:
+      mark files as requiring merging.
+    save <file>:
+      save the state of the merge to a file to be resumed elsewhere
+    load <file>:
+      load the state of the merge from a file created by save
+    '''
+
+    im = Imerge(ui, repo)
+
+    if im.merging():
+        im.resume()
+    else:
+        rev = opts.get('rev')
+        if rev and args:
+            raise util.Abort('please specify just one revision')
+
+        if len(args) == 2 and args[0] == 'load':
+            pass
+        else:
+            if args:
+                rev = args[0]
+            im.start(rev=rev)
+            if opts.get('auto'):
+                args = ['merge', '--auto']
+            else:
+                args = ['status']
+
+    if not args:
+        args = ['merge']
+
+    return dispatch_(im, args, opts)
+
+cmdtable = {
+    '^imerge':
+    (imerge,
+     [('r', 'rev', '', _('revision to merge')),
+      ('a', 'auto', None, _('automatically merge where possible'))],
+      'hg imerge [command]')
+}
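For reference, the status file written by Imerge.save() above is plain enough to parse on its own; this hypothetical sketch (parse_imerge_status is an invented name, not part of the changeset) just restates the layout documented in Imerge.load():

    def parse_imerge_status(data):
        # NUL-separated fields: p1, p2, conflict count, then three names per
        # conflict (local, remote original, remote new), then resolved files
        fields = data.split('\0')
        if len(fields) < 3:
            raise ValueError('invalid imerge status data')
        p1, p2 = fields[0], fields[1]
        nconflicts = int(fields[2]) * 3
        body = fields[3:]
        conflicts = {}
        for i in xrange(0, nconflicts, 3):
            conflicts[body[i]] = (body[i + 1], body[i + 2])
        resolved = body[nconflicts:]
        return p1, p2, conflicts, resolved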
--- a/hgext/interhg.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/interhg.py	Wed Feb 06 19:57:52 2008 -0800
@@ -2,6 +2,9 @@
 #
 # Copyright 2007 OHASHI Hideya <ohachige@gmail.com>
 #
+# Contributor(s):
+#   Edward Lee <edward.lee@engineering.uiuc.edu>
+#
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 #
@@ -13,52 +16,68 @@
 #   [extensions]
 #   interhg =
 #
-# This is an example to link to a bug tracking system.
+# These are some example patterns (link to bug tracking, etc.)
 #
 #   [interhg]
-#   pat1 = s/issue(\d+)/ <a href="http:\/\/bts\/issue\1">issue\1<\/a> /
+#   issues = s!issue(\d+)!<a href="http://bts/issue\1">issue\1<\/a>!
+#   bugzilla = s!((?:bug|b=|(?=#?\d{4,}))(?:\s*#?)(\d+))!<a..=\2">\1</a>!i
+#   boldify = s/(^|\s)#(\d+)\b/ <b>#\2<\/b>/
 #
-# You can add patterns to use pat2, pat3, ...
-# For exapmle.
-#
-#   pat2 = s/(^|\s)#(\d+)\b/ <b>#\2<\/b> /
+# Add any number of names and patterns to match
 
 import re
 from mercurial.hgweb import hgweb_mod
-from mercurial import templater
+from mercurial import templatefilters
 
-orig_escape = templater.common_filters["escape"]
+orig_escape = templatefilters.filters["escape"]
 
 interhg_table = []
 
 def interhg_escape(x):
     escstr = orig_escape(x)
-    for pat in interhg_table:
-        regexp = pat[0]
-        format = pat[1]
+    for regexp, format in interhg_table:
         escstr = regexp.sub(format, escstr)
     return escstr
 
-templater.common_filters["escape"] = interhg_escape
+templatefilters.filters["escape"] = interhg_escape
 
 orig_refresh = hgweb_mod.hgweb.refresh
 
 def interhg_refresh(self):
     interhg_table[:] = []
-    num = 1
-    while True:
-        key = 'pat%d' % num
-        pat = self.config('interhg', key)
-        if pat == None:
-            break
-        pat = pat[2:-1]
-        span = re.search(r'[^\\]/', pat).span()
-        regexp = pat[:span[0] + 1]
-        format = pat[span[1]:]
-        format = re.sub(r'\\/', '/', format)
-        regexp = re.compile(regexp)
-        interhg_table.append((regexp, format))
-        num += 1
+    for key, pattern in self.repo.ui.configitems('interhg'):
+        # grab the delimiter from the character after the "s"
+        unesc = pattern[1]
+        delim = re.escape(unesc)
+
+        # identify portions of the pattern, taking care to avoid escaped
+        # delimiters. the replace format and flags are optional, but delimiters
+        # are required.
+        match = re.match(r'^s%s(.+)(?:(?<=\\\\)|(?<!\\))%s(.*)%s([ilmsux])*$'
+                         % (delim, delim, delim), pattern)
+        if not match:
+            self.repo.ui.warn("interhg: invalid pattern for %s: %s\n"
+                              % (key, pattern))
+            continue
+
+        # we need to unescape the delimiter for regexp and format
+        delim_re = re.compile(r'(?<!\\)\\%s' % delim)
+        regexp = delim_re.sub(unesc, match.group(1))
+        format = delim_re.sub(unesc, match.group(2))
+
+        # the pattern allows for 6 regexp flags, so set them if necessary
+        flagin = match.group(3)
+        flags = 0
+        if flagin:
+            for flag in flagin.upper():
+                flags |= re.__dict__[flag]
+
+        try:
+            regexp = re.compile(regexp, flags)
+            interhg_table.append((regexp, format))
+        except re.error:
+            self.repo.ui.warn("interhg: invalid regexp for %s: %s\n"
+                              % (key, regexp))
     return orig_refresh(self)
 
 hgweb_mod.hgweb.refresh = interhg_refresh
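Once interhg_refresh() above has compiled a configured pattern such as the "issues" example, interhg_escape() simply runs every (regexp, format) pair over the already-escaped template output. A small hypothetical illustration of that rewriting step (the table contents are made up; only the standard re module is used):

    import re

    table = [(re.compile(r'issue(\d+)'),
              r'<a href="http://bts/issue\1">issue\1</a>')]

    def linkify(escaped):
        # mirrors interhg_escape(): apply each configured rewrite in turn
        for regexp, format in table:
            escaped = regexp.sub(format, escaped)
        return escaped

    print linkify('fixes issue42')
    # -> fixes <a href="http://bts/issue42">issue42</a>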
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/keyword.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,520 @@
+# keyword.py - $Keyword$ expansion for Mercurial
+#
+# Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+#
+# $Id$
+#
+# Keyword expansion hack against the grain of a DSCM
+#
+# There are many good reasons why this is not needed in a distributed
+# SCM; still, it may be useful in very small projects based on single
+# files (like LaTeX packages) that are mostly addressed to an audience
+# not running a version control system.
+#
+# For in-depth discussion refer to
+# <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>.
+#
+# Keyword expansion is based on Mercurial's changeset template mappings.
+#
+# Binary files are not touched.
+#
+# Setup in hgrc:
+#
+#   [extensions]
+#   # enable extension
+#   hgext.keyword =
+#
+# Files to act upon/ignore are specified in the [keyword] section.
+# Customized keyword template mappings in the [keywordmaps] section.
+#
+# Run "hg help keyword" and "hg kwdemo" to get info on configuration.
+
+'''keyword expansion in local repositories
+
+This extension expands RCS/CVS-like or self-customized $Keywords$
+in tracked text files selected by your configuration.
+
+Keywords are only expanded in local repositories and not stored in
+the change history. The mechanism can be regarded as a convenience
+for the current user or for archive distribution.
+
+Configuration is done in the [keyword] and [keywordmaps] sections
+of hgrc files.
+
+Example:
+
+    [keyword]
+    # expand keywords in every python file except those matching "x*"
+    **.py =
+    x*    = ignore
+
+Note: the more specific you are in your filename patterns,
+      the less speed you lose in huge repos.
+
+To demonstrate and check [keywordmaps] template mapping and expansion,
+run "hg kwdemo".
+
+An additional date template filter {date|utcdate} is provided.
+
+The default template mappings (view with "hg kwdemo -d") can be replaced
+with customized keywords and templates.
+Again, run "hg kwdemo" to control the results of your config changes.
+
+Before changing/disabling active keywords, run "hg kwshrink" to avoid
+the risk of inadvertently storing expanded keywords in the change history.
+
+To force expansion after enabling it, or a configuration change, run
+"hg kwexpand".
+
+Also, when committing with the record extension or using mq's qrecord, be aware
+that keywords cannot be updated. Again, run "hg kwexpand" on the files in
+question to update keyword expansions after all changes have been checked in.
+
+Expansions spanning more than one line and incremental expansions,
+like CVS' $Log$, are not supported. A keyword template map
+"Log = {desc}" expands to the first line of the changeset description.
+'''
+
+from mercurial import commands, cmdutil, context, dispatch, filelog, revlog
+from mercurial import patch, localrepo, templater, templatefilters, util
+from mercurial.node import *
+from mercurial.i18n import _
+import re, shutil, sys, tempfile, time
+
+commands.optionalrepo += ' kwdemo'
+
+# hg commands that do not act on keywords
+nokwcommands = ('add addremove bundle copy export grep identify incoming init'
+                ' log outgoing push remove rename rollback tip convert')
+
+# hg commands that trigger expansion only when writing to working dir,
+# not when reading filelog, and unexpand when reading from working dir
+restricted = 'diff1 record qfold qimport qnew qpush qrefresh qrecord'
+
+def utcdate(date):
+    '''Returns hgdate in cvs-like UTC format.'''
+    return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
+
+
+_kwtemplater = None
+
+class kwtemplater(object):
+    '''
+    Sets up keyword templates, corresponding keyword regex, and
+    provides keyword substitution functions.
+    '''
+    templates = {
+        'Revision': '{node|short}',
+        'Author': '{author|user}',
+        'Date': '{date|utcdate}',
+        'RCSFile': '{file|basename},v',
+        'Source': '{root}/{file},v',
+        'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
+        'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
+    }
+
+    def __init__(self, ui, repo, inc, exc, restricted):
+        self.ui = ui
+        self.repo = repo
+        self.matcher = util.matcher(repo.root, inc=inc, exc=exc)[1]
+        self.restricted = restricted
+        self.commitnode = None
+        self.path = ''
+
+        kwmaps = self.ui.configitems('keywordmaps')
+        if kwmaps: # override default templates
+            kwmaps = [(k, templater.parsestring(v, quoted=False))
+                      for (k, v) in kwmaps]
+            self.templates = dict(kwmaps)
+        escaped = map(re.escape, self.templates.keys())
+        kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
+        self.re_kw = re.compile(kwpat)
+
+        templatefilters.filters['utcdate'] = utcdate
+        self.ct = cmdutil.changeset_templater(self.ui, self.repo,
+                                              False, '', False)
+
+    def substitute(self, node, data, subfunc):
+        '''Obtains file's changenode if commit node not given,
+        and calls given substitution function.'''
+        if self.commitnode:
+            fnode = self.commitnode
+        else:
+            c = context.filectx(self.repo, self.path, fileid=node)
+            fnode = c.node()
+
+        def kwsub(mobj):
+            '''Substitutes keyword using corresponding template.'''
+            kw = mobj.group(1)
+            self.ct.use_template(self.templates[kw])
+            self.ui.pushbuffer()
+            self.ct.show(changenode=fnode, root=self.repo.root, file=self.path)
+            ekw = templatefilters.firstline(self.ui.popbuffer())
+            return '$%s: %s $' % (kw, ekw)
+
+        return subfunc(kwsub, data)
+
+    def expand(self, node, data):
+        '''Returns data with keywords expanded.'''
+        if self.restricted or util.binary(data):
+            return data
+        return self.substitute(node, data, self.re_kw.sub)
+
+    def process(self, node, data, expand):
+        '''Returns a tuple: data, count.
+        Count is number of keywords/keyword substitutions,
+        telling caller whether to act on file containing data.'''
+        if util.binary(data):
+            return data, None
+        if expand:
+            return self.substitute(node, data, self.re_kw.subn)
+        return data, self.re_kw.search(data)
+
+    def shrink(self, text):
+        '''Returns text with all keyword substitutions removed.'''
+        if util.binary(text):
+            return text
+        return self.re_kw.sub(r'$\1$', text)
+
+class kwfilelog(filelog.filelog):
+    '''
+    Subclass of filelog to hook into its read, add, cmp methods.
+    Keywords are "stored" unexpanded, and processed on reading.
+    '''
+    def __init__(self, opener, path):
+        super(kwfilelog, self).__init__(opener, path)
+        _kwtemplater.path = path
+
+    def kwctread(self, node, expand):
+        '''Reads expanding and counting keywords, called from _overwrite.'''
+        data = super(kwfilelog, self).read(node)
+        return _kwtemplater.process(node, data, expand)
+
+    def read(self, node):
+        '''Expands keywords when reading filelog.'''
+        data = super(kwfilelog, self).read(node)
+        return _kwtemplater.expand(node, data)
+
+    def add(self, text, meta, tr, link, p1=None, p2=None):
+        '''Removes keyword substitutions when adding to filelog.'''
+        text = _kwtemplater.shrink(text)
+        return super(kwfilelog, self).add(text, meta, tr, link, p1=p1, p2=p2)
+
+    def cmp(self, node, text):
+        '''Removes keyword substitutions for comparison.'''
+        text = _kwtemplater.shrink(text)
+        if self.renamed(node):
+            t2 = super(kwfilelog, self).read(node)
+            return t2 != text
+        return revlog.revlog.cmp(self, node, text)
+
+
+# store original patch.patchfile.__init__
+_patchfile_init = patch.patchfile.__init__
+
+def _kwpatchfile_init(self, ui, fname, missing=False):
+    '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
+    rejects or conflicts due to expanded keywords in working dir.'''
+    _patchfile_init(self, ui, fname, missing=missing)
+
+    if _kwtemplater.matcher(self.fname):
+        # shrink keywords read from working dir
+        kwshrunk = _kwtemplater.shrink(''.join(self.lines))
+        self.lines = kwshrunk.splitlines(True)
+
+
+def _iskwfile(f, link):
+    return not link(f) and _kwtemplater.matcher(f)
+
+def _status(ui, repo, *pats, **opts):
+    '''Bails out if [keyword] configuration is not active.
+    Returns status of working directory.'''
+    if _kwtemplater:
+        files, match, anypats = cmdutil.matchpats(repo, pats, opts)
+        return repo.status(files=files, match=match, list_clean=True)
+    if ui.configitems('keyword'):
+        raise util.Abort(_('[keyword] patterns cannot match'))
+    raise util.Abort(_('no [keyword] patterns configured'))
+
+def _overwrite(ui, repo, node=None, expand=True, files=None):
+    '''Overwrites selected files expanding/shrinking keywords.'''
+    ctx = repo.changectx(node)
+    mf = ctx.manifest()
+    if node is not None:   # commit
+        _kwtemplater.commitnode = node
+        files = [f for f in ctx.files() if f in mf]
+        notify = ui.debug
+    else:                  # kwexpand/kwshrink
+        notify = ui.note
+    candidates = [f for f in files if _iskwfile(f, mf.linkf)]
+    if candidates:
+        candidates.sort()
+        action = expand and 'expanding' or 'shrinking'
+        for f in candidates:
+            fp = repo.file(f, kwmatch=True)
+            data, kwfound = fp.kwctread(mf[f], expand)
+            if kwfound:
+                notify(_('overwriting %s %s keywords\n') % (f, action))
+                repo.wwrite(f, data, mf.flags(f))
+                repo.dirstate.normal(f)
+
+def _kwfwrite(ui, repo, expand, *pats, **opts):
+    '''Selects files and passes them to _overwrite.'''
+    status = _status(ui, repo, *pats, **opts)
+    modified, added, removed, deleted, unknown, ignored, clean = status
+    if modified or added or removed or deleted:
+        raise util.Abort(_('outstanding uncommitted changes in given files'))
+    wlock = lock = None
+    try:
+        wlock = repo.wlock()
+        lock = repo.lock()
+        _overwrite(ui, repo, expand=expand, files=clean)
+    finally:
+        del wlock, lock
+
+
+def demo(ui, repo, *args, **opts):
+    '''print [keywordmaps] configuration and an expansion example
+
+    Show current, custom, or default keyword template maps
+    and their expansion.
+
+    Extend current configuration by specifying maps as arguments
+    and optionally by reading from an additional hgrc file.
+
+    Override current keyword template maps with "default" option.
+    '''
+    def demostatus(stat):
+        ui.status(_('\n\t%s\n') % stat)
+
+    def demoitems(section, items):
+        ui.write('[%s]\n' % section)
+        for k, v in items:
+            ui.write('%s = %s\n' % (k, v))
+
+    msg = 'hg keyword config and expansion example'
+    kwstatus = 'current'
+    fn = 'demo.txt'
+    branchname = 'demobranch'
+    tmpdir = tempfile.mkdtemp('', 'kwdemo.')
+    ui.note(_('creating temporary repo at %s\n') % tmpdir)
+    repo = localrepo.localrepository(ui, path=tmpdir, create=True)
+    ui.setconfig('keyword', fn, '')
+    if args or opts.get('rcfile'):
+        kwstatus = 'custom'
+    if opts.get('rcfile'):
+        ui.readconfig(opts.get('rcfile'))
+    if opts.get('default'):
+        kwstatus = 'default'
+        kwmaps = kwtemplater.templates
+        if ui.configitems('keywordmaps'):
+            # override maps from optional rcfile
+            for k, v in kwmaps.iteritems():
+                ui.setconfig('keywordmaps', k, v)
+    elif args:
+        # simulate hgrc parsing
+        rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
+        fp = repo.opener('hgrc', 'w')
+        fp.writelines(rcmaps)
+        fp.close()
+        ui.readconfig(repo.join('hgrc'))
+    if not opts.get('default'):
+        kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
+    reposetup(ui, repo)
+    for k, v in ui.configitems('extensions'):
+        if k.endswith('keyword'):
+            extension = '%s = %s' % (k, v)
+            break
+    demostatus('config using %s keyword template maps' % kwstatus)
+    ui.write('[extensions]\n%s\n' % extension)
+    demoitems('keyword', ui.configitems('keyword'))
+    demoitems('keywordmaps', kwmaps.iteritems())
+    keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
+    repo.wopener(fn, 'w').write(keywords)
+    repo.add([fn])
+    path = repo.wjoin(fn)
+    ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
+    ui.note(keywords)
+    ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
+    # silence branch command if not verbose
+    quiet = ui.quiet
+    ui.quiet = not ui.verbose
+    commands.branch(ui, repo, branchname)
+    ui.quiet = quiet
+    for name, cmd in ui.configitems('hooks'):
+        if name.split('.', 1)[0].find('commit') > -1:
+            repo.ui.setconfig('hooks', name, '')
+    ui.note(_('unhooked all commit hooks\n'))
+    ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
+    repo.commit(text=msg)
+    format = ui.verbose and ' in %s' % path or ''
+    demostatus('%s keywords expanded%s' % (kwstatus, format))
+    ui.write(repo.wread(fn))
+    ui.debug(_('\nremoving temporary repo %s\n') % tmpdir)
+    shutil.rmtree(tmpdir, ignore_errors=True)
+
+def expand(ui, repo, *pats, **opts):
+    '''expand keywords in working directory
+
+    Run after (re)enabling keyword expansion.
+
+    kwexpand refuses to run if given files contain local changes.
+    '''
+    # 3rd argument sets expansion to True
+    _kwfwrite(ui, repo, True, *pats, **opts)
+
+def files(ui, repo, *pats, **opts):
+    '''print files currently configured for keyword expansion
+
+    Crosscheck which files in working directory are potential targets for
+    keyword expansion.
+    That is, files matched by [keyword] config patterns but not symlinks.
+    '''
+    status = _status(ui, repo, *pats, **opts)
+    modified, added, removed, deleted, unknown, ignored, clean = status
+    files = modified + added + clean
+    if opts.get('untracked'):
+        files += unknown
+    files.sort()
+    kwfiles = [f for f in files if _iskwfile(f, repo._link)]
+    cwd = pats and repo.getcwd() or ''
+    kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
+    if opts.get('all') or opts.get('ignore'):
+        kwfstats += (('I', [f for f in files if f not in kwfiles]),)
+    for char, filenames in kwfstats:
+        format = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
+        for f in filenames:
+            ui.write(format % repo.pathto(f, cwd))
+
+def shrink(ui, repo, *pats, **opts):
+    '''revert expanded keywords in working directory
+
+    Run before changing/disabling active keywords
+    or if you experience problems with "hg import" or "hg merge".
+
+    kwshrink refuses to run if given files contain local changes.
+    '''
+    # 3rd argument sets expansion to False
+    _kwfwrite(ui, repo, False, *pats, **opts)
+
+
+def reposetup(ui, repo):
+    '''Sets up repo as kwrepo for keyword substitution.
+    Overrides file method to return kwfilelog instead of filelog
+    if file matches user configuration.
+    Wraps commit to overwrite configured files with updated
+    keyword substitutions.
+    This is done for local repos only, and only if there are
+    files configured at all for keyword substitution.'''
+
+    if not repo.local():
+        return
+
+    hgcmd, func, args, opts, cmdopts = dispatch._parse(ui, sys.argv[1:])
+    if hgcmd in nokwcommands.split():
+        return
+
+    if hgcmd == 'diff':
+        # only expand if comparing against working dir
+        node1, node2 = cmdutil.revpair(repo, cmdopts.get('rev'))
+        if node2 is not None:
+            return
+        # shrink if rev is not current node
+        if node1 is not None and node1 != repo.changectx().node():
+            hgcmd = 'diff1'
+
+    inc, exc = [], ['.hgtags']
+    for pat, opt in ui.configitems('keyword'):
+        if opt != 'ignore':
+            inc.append(pat)
+        else:
+            exc.append(pat)
+    if not inc:
+        return
+
+    global _kwtemplater
+    _restricted = hgcmd in restricted.split()
+    _kwtemplater = kwtemplater(ui, repo, inc, exc, _restricted)
+
+    class kwrepo(repo.__class__):
+        def file(self, f, kwmatch=False):
+            if f[0] == '/':
+                f = f[1:]
+            if kwmatch or _kwtemplater.matcher(f):
+                return kwfilelog(self.sopener, f)
+            return filelog.filelog(self.sopener, f)
+
+        def wread(self, filename):
+            data = super(kwrepo, self).wread(filename)
+            if _restricted and _kwtemplater.matcher(filename):
+                return _kwtemplater.shrink(data)
+            return data
+
+        def commit(self, files=None, text='', user=None, date=None,
+                   match=util.always, force=False, force_editor=False,
+                   p1=None, p2=None, extra={}, empty_ok=False):
+            wlock = lock = None
+            _p1 = _p2 = None
+            try:
+                wlock = self.wlock()
+                lock = self.lock()
+                # store and postpone commit hooks
+                commithooks = {}
+                for name, cmd in ui.configitems('hooks'):
+                    if name.split('.', 1)[0] == 'commit':
+                        commithooks[name] = cmd
+                        ui.setconfig('hooks', name, None)
+                if commithooks:
+                    # store parents for commit hook environment
+                    if p1 is None:
+                        _p1, _p2 = repo.dirstate.parents()
+                    else:
+                        _p1, _p2 = p1, p2 or nullid
+                    _p1 = hex(_p1)
+                    if _p2 == nullid:
+                        _p2 = ''
+                    else:
+                        _p2 = hex(_p2)
+
+                node = super(kwrepo,
+                             self).commit(files=files, text=text, user=user,
+                                          date=date, match=match, force=force,
+                                          force_editor=force_editor,
+                                          p1=p1, p2=p2, extra=extra,
+                                          empty_ok=empty_ok)
+
+                # restore commit hooks
+                for name, cmd in commithooks.iteritems():
+                    ui.setconfig('hooks', name, cmd)
+                if node is not None:
+                    _overwrite(ui, self, node=node)
+                    repo.hook('commit', node=node, parent1=_p1, parent2=_p2)
+                return node
+            finally:
+                del wlock, lock
+
+    repo.__class__ = kwrepo
+    patch.patchfile.__init__ = _kwpatchfile_init
+
+
+cmdtable = {
+    'kwdemo':
+        (demo,
+         [('d', 'default', None, _('show default keyword template maps')),
+          ('f', 'rcfile', [], _('read maps from rcfile'))],
+         _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
+    'kwexpand': (expand, commands.walkopts,
+                 _('hg kwexpand [OPTION]... [FILE]...')),
+    'kwfiles':
+        (files,
+         [('a', 'all', None, _('show keyword status flags of all files')),
+          ('i', 'ignore', None, _('show files excluded from expansion')),
+          ('u', 'untracked', None, _('additionally show untracked files')),
+         ] + commands.walkopts,
+         _('hg kwfiles [OPTION]... [FILE]...')),
+    'kwshrink': (shrink, commands.walkopts,
+                 _('hg kwshrink [OPTION]... [FILE]...')),
+}
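Both expansion and shrinking in kwtemplater above hinge on a single regular expression built from the configured keyword names. A standalone sketch of that pattern (the keyword list and sample text are invented; only the re module is assumed), showing the shrink direction used by kwfilelog.add() and kwshrink:

    import re

    keywords = ['Id', 'Revision']
    kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(map(re.escape, keywords))
    re_kw = re.compile(kwpat)

    expanded = "$Id: demo.txt,v 1234567890ab 2008/02/06 19:57:52 user $"
    # collapse an expanded keyword back to its bare $Keyword$ form
    print re_kw.sub(r'$\1$', expanded)
    # -> $Id$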
--- a/hgext/mq.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/mq.py	Wed Feb 06 19:57:52 2008 -0800
@@ -34,7 +34,7 @@
 from mercurial import repair
 import os, sys, re, errno
 
-commands.norepo += " qclone qversion"
+commands.norepo += " qclone"
 
 # Patch names look like unix-file names.
 # They must be joinable with queue directory and result in the patch path.
@@ -224,7 +224,7 @@
         def write_list(items, path):
             fp = self.opener(path, 'w')
             for i in items:
-                print >> fp, i
+                fp.write("%s\n" % i)
             fp.close()
         if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
         if self.series_dirty: write_list(self.full_series, self.series_path)
@@ -455,7 +455,8 @@
                     repo.dirstate.invalidate()
                 raise
         finally:
-            del lock, wlock, tr
+            del tr, lock, wlock
+            self.removeundo(repo)
 
     def _apply(self, repo, series, list=False, update_status=True,
                strict=False, patchdir=None, merge=None, all_files={}):
@@ -527,7 +528,6 @@
                 self.ui.warn("fuzz found when applying patch, stopping\n")
                 err = 1
                 break
-        self.removeundo(repo)
         return (err, n)
 
     def delete(self, repo, patches, opts):
@@ -587,7 +587,7 @@
             top = revlog.bin(self.applied[-1].rev)
             pp = repo.dirstate.parents()
             if top not in pp:
-                raise util.Abort(_("queue top not at same revision as working directory"))
+                raise util.Abort(_("working directory revision is not qtip"))
             return top
         return None
     def check_localchanges(self, repo, force=False, refresh=True):
@@ -600,9 +600,19 @@
                     raise util.Abort(_("local changes found"))
         return m, a, r, d
 
+    _reserved = ('series', 'status', 'guards')
+    def check_reserved_name(self, name):
+        if (name in self._reserved or name.startswith('.hg')
+            or name.startswith('.mq')):
+            raise util.Abort(_('"%s" cannot be used as the name of a patch')
+                             % name)
+
     def new(self, repo, patch, *pats, **opts):
         msg = opts.get('msg')
         force = opts.get('force')
+        user = opts.get('user')
+        date = opts.get('date')
+        self.check_reserved_name(patch)
         if os.path.exists(self.join(patch)):
             raise util.Abort(_('patch "%s" already exists') % patch)
         if opts.get('include') or opts.get('exclude') or pats:
@@ -610,15 +620,14 @@
             m, a, r, d = repo.status(files=fns, match=match)[:4]
         else:
             m, a, r, d = self.check_localchanges(repo, force)
+            fns, match, anypats = cmdutil.matchpats(repo, m + a + r)
         commitfiles = m + a + r
         self.check_toppatch(repo)
         wlock = repo.wlock()
         try:
             insert = self.full_series_end()
-            if msg:
-                n = repo.commit(commitfiles, msg, force=True)
-            else:
-                n = repo.commit(commitfiles, "[mq]: %s" % patch, force=True)
+            commitmsg = msg and msg or ("[mq]: %s" % patch)
+            n = repo.commit(commitfiles, commitmsg, user, date, match=match, force=True)
             if n == None:
                 raise util.Abort(_("repo commit failed"))
             self.full_series[insert:insert] = [patch]
@@ -627,6 +636,15 @@
             self.series_dirty = 1
             self.applied_dirty = 1
             p = self.opener(patch, "w")
+            if date:
+                p.write("# HG changeset patch\n")
+                if user:
+                    p.write("# User " + user + "\n")
+                p.write("# Date " + date + "\n")
+                p.write("\n")
+            elif user:
+                p.write("From: " + user + "\n")
+                p.write("\n")
             if msg:
                 msg = msg + "\n"
                 p.write(msg)
@@ -635,7 +653,7 @@
             r = self.qrepo()
             if r: r.add([patch])
             if commitfiles:
-                self.refresh(repo, short=True)
+                self.refresh(repo, short=True, git=opts.get('git'))
             self.removeundo(repo)
         finally:
             del wlock
@@ -654,6 +672,9 @@
 
             self.removeundo(repo)
             repair.strip(self.ui, repo, rev, backup)
+            # strip may have unbundled a set of backed up revisions after
+            # the actual strip
+            self.removeundo(repo)
         finally:
             del lock, wlock
 
@@ -810,9 +831,9 @@
             del wlock
 
     def pop(self, repo, patch=None, force=False, update=True, all=False):
-        def getfile(f, rev):
+        def getfile(f, rev, flags):
             t = repo.file(f).read(rev)
-            repo.wfile(f, "w").write(t)
+            repo.wwrite(f, t, flags)
 
         wlock = repo.wlock()
         try:
@@ -859,10 +880,16 @@
             start = info[0]
             rev = revlog.bin(info[1])
 
+            if update:
+                top = self.check_toppatch(repo)
+
+            if repo.changelog.heads(rev) != [revlog.bin(self.applied[-1].rev)]:
+                raise util.Abort("popping would remove a revision not "
+                                 "managed by this patch queue")
+
             # we know there are no local changes, so we can make a simplified
             # form of hg.update.
             if update:
-                top = self.check_toppatch(repo)
                 qp = self.qparents(repo, rev)
                 changes = repo.changelog.read(qp)
                 mmap = repo.manifest.read(changes[0])
@@ -870,10 +897,9 @@
                 if d:
                     raise util.Abort("deletions found between repo revs")
                 for f in m:
-                    getfile(f, mmap[f])
+                    getfile(f, mmap[f], mmap.flags(f))
                 for f in r:
-                    getfile(f, mmap[f])
-                    util.set_exec(repo.wjoin(f), mmap.execf(f))
+                    getfile(f, mmap[f], mmap.flags(f))
                 for f in m + r:
                     repo.dirstate.normal(f)
                 for f in a:
@@ -886,8 +912,8 @@
                     except: pass
                     repo.dirstate.forget(f)
                 repo.dirstate.setparents(qp, revlog.nullid)
+            del self.applied[start:end]
             self.strip(repo, rev, update=False, backup='strip')
-            del self.applied[start:end]
             if len(self.applied):
                 self.ui.write("Now at: %s\n" % self.applied[-1].name)
             else:
@@ -914,6 +940,8 @@
             self.check_toppatch(repo)
             (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
             top = revlog.bin(top)
+            if repo.changelog.heads(top) != [top]:
+                raise util.Abort("cannot refresh a revision with children")
             cparents = repo.changelog.parents(top)
             patchparent = self.qparents(repo, top)
             message, comments, user, date, patchfound = self.readheaders(patchfn)
@@ -925,22 +953,59 @@
                 if line.startswith('diff --git'):
                     self.diffopts().git = True
                     break
+
+            msg = opts.get('msg', '').rstrip()
+            if msg and comments:
+                # Remove existing message, keeping the rest of the comments
+                # fields.
+                # If comments contains 'subject: ', message will prepend
+                # the field and a blank line.
+                if message:
+                    subj = 'subject: ' + message[0].lower()
+                    for i in xrange(len(comments)):
+                        if subj == comments[i].lower():
+                            del comments[i]
+                            message = message[2:]
+                            break
+                ci = 0
+                for mi in xrange(len(message)):
+                    while message[mi] != comments[ci]:
+                        ci += 1
+                    del comments[ci]
+
+            def setheaderfield(comments, prefixes, new):
+                # Update all references to a field in the patch header.
+                # If none found, add it email style.
+                res = False
+                for prefix in prefixes:
+                    for i in xrange(len(comments)):
+                        if comments[i].startswith(prefix):
+                            comments[i] = prefix + new
+                            res = True
+                            break
+                return res
+
+            newuser = opts.get('user')
+            if newuser:
+                if not setheaderfield(comments, ['From: ', '# User '], newuser):
+                    try:
+                        patchheaderat = comments.index('# HG changeset patch')
+                        comments.insert(patchheaderat + 1,'# User ' + newuser)
+                    except ValueError:
+                        comments = ['From: ' + newuser, ''] + comments
+                user = newuser
+
+            newdate = opts.get('date')
+            if newdate:
+                if setheaderfield(comments, ['# Date '], newdate):
+                    date = newdate
+
+            if msg:
+                comments.append(msg)
+
             patchf.seek(0)
             patchf.truncate()
 
-            msg = opts.get('msg', '').rstrip()
-            if msg:
-                if comments:
-                    # Remove existing message.
-                    ci = 0
-                    subj = None
-                    for mi in xrange(len(message)):
-                        if comments[ci].lower().startswith('subject: '):
-                            subj = comments[ci][9:]
-                        while message[mi] != comments[ci] and message[mi] != subj:
-                            ci += 1
-                        del comments[ci]
-                comments.append(msg)
             if comments:
                 comments = "\n".join(comments) + '\n\n'
                 patchf.write(comments)
@@ -1017,9 +1082,8 @@
                 copies = {}
                 for dst in a:
                     src = repo.dirstate.copied(dst)
-                    if src is None:
-                        continue
-                    copies.setdefault(src, []).append(dst)
+                    if src is not None:
+                        copies.setdefault(src, []).append(dst)
                     repo.dirstate.add(dst)
                 # remember the copies between patchparent and tip
                 # this may be slow, so don't do it if we're not tracking copies
@@ -1049,7 +1113,7 @@
                 for f in m:
                     repo.dirstate.normal(f)
                 for f in mm:
-                    repo.dirstate.normaldirty(f)
+                    repo.dirstate.normallookup(f)
                 for f in forget:
                     repo.dirstate.forget(f)
 
@@ -1061,12 +1125,16 @@
                 else:
                     message = msg
 
+                if not user:
+                    user = changes[1]
+
+                self.applied.pop()
+                self.applied_dirty = 1
                 self.strip(repo, top, update=False,
                            backup='strip')
-                n = repo.commit(filelist, message, changes[1], match=matchfn,
+                n = repo.commit(filelist, message, user, date, match=matchfn,
                                 force=1)
-                self.applied[-1] = statusentry(revlog.hex(n), patchfn)
-                self.applied_dirty = 1
+                self.applied.append(statusentry(revlog.hex(n), patchfn))
                 self.removeundo(repo)
             else:
                 self.printdiff(repo, patchparent, fp=patchf)
@@ -1216,7 +1284,7 @@
             self.ui.warn("saved queue repository parents: %s %s\n" %
                          (hg.short(qpp[0]), hg.short(qpp[1])))
             if qupdate:
-                print "queue directory updating"
+                self.ui.status(_("queue directory updating\n"))
                 r = self.qrepo()
                 if not r:
                     self.ui.warn("Unable to load queue repository\n")
@@ -1355,6 +1423,7 @@
 
                 if not patchname:
                     patchname = normname('%d.diff' % r)
+                self.check_reserved_name(patchname)
                 checkseries(patchname)
                 checkfile(patchname)
                 self.full_series.insert(0, patchname)
@@ -1377,6 +1446,7 @@
                     raise util.Abort(_('-e is incompatible with import from -'))
                 if not patchname:
                     patchname = normname(filename)
+                self.check_reserved_name(patchname)
                 if not os.path.isfile(self.join(patchname)):
                     raise util.Abort(_("patch %s does not exist") % patchname)
             else:
@@ -1391,6 +1461,7 @@
                     raise util.Abort(_("unable to read %s") % patchname)
                 if not patchname:
                     patchname = normname(os.path.basename(filename))
+                self.check_reserved_name(patchname)
                 checkfile(patchname)
                 patchf = self.opener(patchname, "w")
                 patchf.write(text)
@@ -1508,13 +1579,18 @@
     The patch directory must be a nested mercurial repository, as
     would be created by qinit -c.
     '''
+    def patchdir(repo):
+        url = repo.url()
+        if url.endswith('/'):
+            url = url[:-1]
+        return url + '/.hg/patches'
     cmdutil.setremoteconfig(ui, opts)
     if dest is None:
         dest = hg.defaultdest(source)
     sr = hg.repository(ui, ui.expandpath(source))
-    patchdir = opts['patches'] or (sr.url() + '/.hg/patches')
+    patchespath = opts['patches'] or patchdir(sr)
     try:
-        pr = hg.repository(ui, patchdir)
+        pr = hg.repository(ui, patchespath)
     except hg.RepoError:
         raise util.Abort(_('versioned patch repository not found'
                            ' (see qinit -c)'))
@@ -1535,10 +1611,8 @@
                       update=False,
                       stream=opts['uncompressed'])
     ui.note(_('cloning patch repo\n'))
-    spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
-                        dr.url() + '/.hg/patches',
-                        pull=opts['pull'],
-                        update=not opts['noupdate'],
+    spr, dpr = hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
+                        pull=opts['pull'], update=not opts['noupdate'],
                         stream=opts['uncompressed'])
     if dr.local():
         if qbase:
@@ -1593,6 +1667,13 @@
     return q.qseries(repo, start=l-2, length=1, status='A',
                      summary=opts.get('summary'))
 
+def setupheaderopts(ui, opts):
+    def do(opt,val):
+        if not opts[opt] and opts['current' + opt]:
+            opts[opt] = val
+    do('user', ui.username())
+    do('date', "%d %d" % util.makedate())
+
 def new(ui, repo, patch, *args, **opts):
     """create a new patch
 
@@ -1611,6 +1692,7 @@
     if opts['edit']:
         message = ui.edit(message, ui.username())
     opts['msg'] = message
+    setupheaderopts(ui, opts)
     q.new(repo, patch, *args, **opts)
     q.save_dirty()
     return 0
@@ -1628,11 +1710,15 @@
     q = repo.mq
     message = cmdutil.logmessage(opts)
     if opts['edit']:
+        if not q.applied:
+            ui.write(_("No patches applied\n"))
+            return 1
         if message:
             raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
         patch = q.applied[-1].name
         (message, comment, user, date, hasdiff) = q.readheaders(patch)
         message = ui.edit('\n'.join(message), user or ui.username())
+    setupheaderopts(ui, opts)
     ret = q.refresh(repo, pats, msg=message, **opts)
     q.save_dirty()
     return ret
@@ -2081,6 +2167,12 @@
                 return tagscache
 
             mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
+
+            if mqtags[-1][0] not in self.changelog.nodemap:
+                self.ui.warn('mq status file refers to unknown node %s\n'
+                             % revlog.short(mqtags[-1][0]))
+                return tagscache
+
             mqtags.append((mqtags[-1][0], 'qtip'))
             mqtags.append((mqtags[0][0], 'qbase'))
             mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
@@ -2097,11 +2189,17 @@
             if not q.applied:
                 return super(mqrepo, self)._branchtags()
 
+            cl = self.changelog
+            qbasenode = revlog.bin(q.applied[0].rev)
+            if qbasenode not in cl.nodemap:
+                self.ui.warn('mq status file refers to unknown node %s\n'
+                             % revlog.short(qbasenode))
+                return super(mqrepo, self)._branchtags()
+
             self.branchcache = {} # avoid recursion in changectx
-            cl = self.changelog
             partial, last, lrev = self._readbranchcache()
 
-            qbase = cl.rev(revlog.bin(q.applied[0].rev))
+            qbase = cl.rev(qbasenode)
             start = lrev + 1
             if start < qbase:
                 # update the cache (excluding the patches) and save it
@@ -2123,6 +2221,12 @@
 
 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
 
+headeropts = [
+    ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
+    ('u', 'user', '', _('add "From: <given user>" to patch')),
+    ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
+    ('d', 'date', '', _('add "Date: <given date>" to patch'))]
+
 cmdtable = {
     "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
     "qclone":
@@ -2131,10 +2235,8 @@
           ('U', 'noupdate', None, _('do not update the new working directories')),
           ('', 'uncompressed', None,
            _('use uncompressed transfer (fast over LAN)')),
-          ('e', 'ssh', '', _('specify ssh command to use')),
           ('p', 'patches', '', _('location of source patch repo')),
-          ('', 'remotecmd', '',
-           _('specify hg command to run on the remote side'))],
+         ] + commands.remoteopts,
          _('hg qclone [OPTION]... SOURCE [DEST]')),
     "qcommit|qci":
         (commit,
@@ -2143,9 +2245,8 @@
     "^qdiff":
         (diff,
          [('g', 'git', None, _('use git extended diff format')),
-          ('I', 'include', [], _('include names matching the given patterns')),
-          ('X', 'exclude', [], _('exclude names matching the given patterns')),
-          ('U', 'unified', 3, _('number of lines of context to show'))],
+          ('U', 'unified', 3, _('number of lines of context to show')),
+         ] + commands.walkopts,
          _('hg qdiff [-I] [-X] [-U NUM] [-g] [FILE]...')),
     "qdelete|qremove|qrm":
         (delete,
@@ -2184,9 +2285,8 @@
         (new,
          [('e', 'edit', None, _('edit commit message')),
           ('f', 'force', None, _('import uncommitted changes into patch')),
-          ('I', 'include', [], _('include names matching the given patterns')),
-          ('X', 'exclude', [], _('exclude names matching the given patterns')),
-          ] + commands.commitopts,
+          ('g', 'git', None, _('use git extended diff format')),
+          ] + commands.walkopts + commands.commitopts + headeropts,
          _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
     "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
     "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
@@ -2209,9 +2309,7 @@
          [('e', 'edit', None, _('edit commit message')),
           ('g', 'git', None, _('use git extended diff format')),
           ('s', 'short', None, _('refresh only files already in the patch')),
-          ('I', 'include', [], _('include names matching the given patterns')),
-          ('X', 'exclude', [], _('exclude names matching the given patterns')),
-          ] + commands.commitopts,
+          ] + commands.walkopts + commands.commitopts + headeropts,
          _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
     'qrename|qmv':
         (rename, [], _('hg qrename PATCH1 [PATCH2]')),
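The headeropts group introduced above gives qnew and qrefresh the -U/-u and -D/-d flags, which put "From:" and "Date:" lines at the top of a patch. As a rough, hedged illustration only (the real work happens in mq's setupheaderopts(), which is not shown in this hunk), prepending such header lines to an exported patch amounts to something like the sketch below; the helper name and the sample values are made up:

    # Illustrative sketch, not mq code: prepend "From:"/"Date:" lines
    # to a patch, as the -U/-u/-D/-d options describe.
    import time

    def addpatchheader(lines, user=None, date=None):
        header = []
        if user:
            header.append('From: %s\n' % user)
        if date:
            header.append('Date: %s\n' % date)
        return header + lines

    patch = ['fix a typo\n', '\n', 'diff -r aaa -r bbb foo.txt\n']
    print(''.join(addpatchheader(patch,
                                 user='Jane Doe <jane@example.com>',
                                 date=time.strftime('%a %b %d %H:%M:%S %Y'))))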
--- a/hgext/notify.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/notify.py	Wed Feb 06 19:57:52 2008 -0800
@@ -135,7 +135,7 @@
     def fixmail(self, addr):
         '''try to clean up email addresses.'''
 
-        addr = templater.email(addr.strip())
+        addr = util.email(addr.strip())
         if self.domain:
             a = addr.find('@localhost')
             if a != -1:
@@ -231,7 +231,7 @@
         else:
             self.ui.status(_('notify: sending %d subscribers %d changes\n') %
                            (len(self.subs), count))
-            mail.sendmail(self.ui, templater.email(msg['From']),
+            mail.sendmail(self.ui, util.email(msg['From']),
                           self.subs, msgtext)
 
     def diff(self, node, ref):
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/parentrevspec.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,96 @@
+# Mercurial extension to make it easy to refer to the parent of a revision
+#
+# Copyright (C) 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+'''\
+use suffixes to refer to ancestor revisions
+
+This extension allows you to use git-style suffixes to refer to
+the ancestors of a specific revision.
+
+For example, if you can refer to a revision as "foo", then:
+
+- foo^N = Nth parent of foo:
+  foo^0 = foo
+  foo^1 = first parent of foo
+  foo^2 = second parent of foo
+  foo^  = foo^1
+
+- foo~N = Nth ancestor of foo, following first parents only

+  foo~0 = foo
+  foo~1 = foo^1 = foo^ = first parent of foo
+  foo~2 = foo^1^1 = foo^^ = first parent of first parent of foo
+'''
+import mercurial.repo
+
+def reposetup(ui, repo):
+    if not repo.local():
+        return
+
+    class parentrevspecrepo(repo.__class__):
+        def lookup(self, key):
+            try:
+                _super = super(parentrevspecrepo, self)
+                return _super.lookup(key)
+            except mercurial.repo.RepoError:
+                pass
+
+            circ = key.find('^')
+            tilde = key.find('~')
+            if circ < 0 and tilde < 0:
+                raise
+            elif circ >= 0 and tilde >= 0:
+                end = min(circ, tilde)
+            else:
+                end = max(circ, tilde)
+
+            cl = self.changelog
+            base = key[:end]
+            try:
+                node = _super.lookup(base)
+            except mercurial.repo.RepoError:
+                # eek - reraise the first error
+                return _super.lookup(key)
+
+            rev = cl.rev(node)
+            suffix = key[end:]
+            i = 0
+            while i < len(suffix):
+                # foo^N => Nth parent of foo
+                # foo^0 == foo
+                # foo^1 == foo^ == 1st parent of foo
+                # foo^2 == 2nd parent of foo
+                if suffix[i] == '^':
+                    j = i + 1
+                    p = cl.parentrevs(rev)
+                    if j < len(suffix) and suffix[j].isdigit():
+                        j += 1
+                        n = int(suffix[i+1:j])
+                        if n > 2 or n == 2 and p[1] == -1:
+                            raise
+                    else:
+                        n = 1
+                    if n:
+                        rev = p[n - 1]
+                    i = j
+                # foo~N => Nth ancestor of foo, following first parents only
+                # foo~0 = foo
+                # foo~1 = foo^1 == foo^ == 1st parent of foo
+                # foo~2 = foo^1^1 == foo^^ == 1st parent of 1st parent of foo
+                elif suffix[i] == '~':
+                    j = i + 1
+                    while j < len(suffix) and suffix[j].isdigit():
+                        j += 1
+                    if j == i + 1:
+                        raise
+                    n = int(suffix[i+1:j])
+                    for k in xrange(n):
+                        rev = cl.parentrevs(rev)[0]
+                    i = j
+                else:
+                    raise
+            return cl.node(rev)
+
+    repo.__class__ = parentrevspecrepo
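The suffix walk in lookup() above can be tried out on its own. The sketch below is a simplified, standalone rendition over a toy parent table rather than a real changelog; the revision numbers and the parents dict are invented for illustration, and unlike the extension it also accepts multi-digit '^N' and a bare '~':

    # Toy DAG: rev -> (first parent, second parent); -1 means "no parent".
    parents = {0: (-1, -1), 1: (0, -1), 2: (0, -1), 3: (1, 2)}

    def resolve(rev, suffix):
        """Simplified version of the suffix walk in lookup() above."""
        i = 0
        while i < len(suffix):
            j = i + 1
            while j < len(suffix) and suffix[j].isdigit():
                j += 1
            n = int(suffix[i + 1:j] or '1')
            if suffix[i] == '^':            # n-th parent (0 means "itself")
                if n:
                    rev = parents[rev][n - 1]
            elif suffix[i] == '~':          # follow first parents n times
                for _k in range(n):
                    rev = parents[rev][0]
            else:
                raise ValueError('unknown suffix character %r' % suffix[i])
            i = j
        return rev

    print(resolve(3, '^2'))   # second parent of rev 3        -> 2
    print(resolve(3, '~2'))   # first parent of first parent  -> 0
    print(resolve(3, '^'))    # same as '^1'                  -> 1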
--- a/hgext/patchbomb.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/patchbomb.py	Wed Feb 06 19:57:52 2008 -0800
@@ -115,24 +115,24 @@
     '''
 
     def prompt(prompt, default = None, rest = ': ', empty_ok = False):
-        try:
-            # readline gives raw_input editing capabilities, but is not
-            # present on windows
-            import readline
-        except ImportError: pass
-
-        if default: prompt += ' [%s]' % default
+        if not ui.interactive:
+            return default
+        if default:
+            prompt += ' [%s]' % default
         prompt += rest
         while True:
-            r = raw_input(prompt)
-            if r: return r
-            if default is not None: return default
-            if empty_ok: return r
+            r = ui.prompt(prompt, default=default)
+            if r:
+                return r
+            if default is not None:
+                return default
+            if empty_ok:
+                return r
             ui.warn(_('Please enter a valid value.\n'))
 
-    def confirm(s):
+    def confirm(s, denial):
         if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
-            raise ValueError
+            raise util.Abort(denial)
 
     def cdiffstat(summary, patchlines):
         s = patch.diffstat(patchlines)
@@ -140,7 +140,11 @@
             if summary:
                 ui.write(summary, '\n')
                 ui.write(s, '\n')
-            confirm(_('Does the diffstat above look okay'))
+            confirm(_('Does the diffstat above look okay'),
+                    _('diffstat rejected'))
+        elif s is None:
+            ui.warn(_('No diffstat information available.\n'))
+            s = ''
         return s
 
     def makepatch(patch, idx, total):
@@ -149,27 +153,33 @@
         body = ''
         for line in patch:
             if line.startswith('#'):
-                if line.startswith('# Node ID'): node = line.split()[-1]
+                if line.startswith('# Node ID'):
+                    node = line.split()[-1]
                 continue
-            if (line.startswith('diff -r')
-                or line.startswith('diff --git')):
+            if line.startswith('diff -r') or line.startswith('diff --git'):
                 break
             desc.append(line)
-        if not node: raise ValueError
+        if not node:
+            raise ValueError
 
-        #body = ('\n'.join(desc[1:]).strip() or
-        #        'Patch subject is complete summary.')
-        #body += '\n\n\n'
+        if opts['attach']:
+            body = ('\n'.join(desc[1:]).strip() or
+                    'Patch subject is complete summary.')
+            body += '\n\n\n'
 
-        if opts['plain']:
-            while patch and patch[0].startswith('# '): patch.pop(0)
-            if patch: patch.pop(0)
-            while patch and not patch[0].strip(): patch.pop(0)
-        if opts['diffstat']:
+        if opts.get('plain'):
+            while patch and patch[0].startswith('# '):
+                patch.pop(0)
+            if patch:
+                patch.pop(0)
+            while patch and not patch[0].strip():
+                patch.pop(0)
+        if opts.get('diffstat'):
             body += cdiffstat('\n'.join(desc), patch) + '\n\n'
-        if opts['attach']:
+        if opts.get('attach') or opts.get('inline'):
             msg = email.MIMEMultipart.MIMEMultipart()
-            if body: msg.attach(email.MIMEText.MIMEText(body, 'plain'))
+            if body:
+                msg.attach(email.MIMEText.MIMEText(body, 'plain'))
             p = email.MIMEText.MIMEText('\n'.join(patch), 'x-patch')
             binnode = bin(node)
             # if node is mq patch, it will have patch file name as tag
@@ -179,10 +189,13 @@
                 patchname = patchname[0]
             elif total > 1:
                 patchname = cmdutil.make_filename(repo, '%b-%n.patch',
-                                                   binnode, idx, total)
+                                                  binnode, idx, total)
             else:
                 patchname = cmdutil.make_filename(repo, '%b.patch', binnode)
-            p['Content-Disposition'] = 'inline; filename=' + patchname
+            disposition = 'inline'
+            if opts['attach']:
+                disposition = 'attachment'
+            p['Content-Disposition'] = disposition + '; filename=' + patchname
             msg.attach(p)
         else:
             body += '\n'.join(patch)
@@ -190,7 +203,7 @@
 
         subj = desc[0].strip().rstrip('. ')
         if total == 1:
-            subj = '[PATCH] ' + (opts['subject'] or subj)
+            subj = '[PATCH] ' + (opts.get('subject') or subj)
         else:
             tlen = len(str(total))
             subj = '[PATCH %0*d of %d] %s' % (tlen, idx, total, subj)
@@ -216,7 +229,7 @@
         tmpfn = os.path.join(tmpdir, 'bundle')
         try:
             commands.bundle(ui, repo, tmpfn, dest, **opts)
-            return open(tmpfn).read()
+            return open(tmpfn, 'rb').read()
         finally:
             try:
                 os.unlink(tmpfn)
@@ -224,17 +237,18 @@
                 pass
             os.rmdir(tmpdir)
 
-    really_sending = not (opts['test'] or opts['mbox'])
-
-    if really_sending:
+    if not (opts.get('test') or opts.get('mbox')):
+        # really sending
         mail.validateconfig(ui)
 
-    if not (revs or opts.get('rev') or opts.get('outgoing')):
+    if not (revs or opts.get('rev')
+            or opts.get('outgoing') or opts.get('bundle')):
         raise util.Abort(_('specify at least one changeset with -r or -o'))
 
     cmdutil.setremoteconfig(ui, opts)
     if opts.get('outgoing') and opts.get('bundle'):
-        raise util.Abort(_("--outgoing mode always on with --bundle; do not re-specify --outgoing"))
+        raise util.Abort(_("--outgoing mode always on with --bundle;"
+                           " do not re-specify --outgoing"))
 
     if opts.get('outgoing') or opts.get('bundle'):
         if len(revs) > 1:
@@ -254,13 +268,22 @@
 
     # start
     if opts.get('date'):
-        start_time = util.parsedate(opts['date'])
+        start_time = util.parsedate(opts.get('date'))
     else:
         start_time = util.makedate()
 
     def genmsgid(id):
         return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
 
+    def getdescription(body, sender):
+        if opts.get('desc'):
+            body = open(opts.get('desc')).read()
+        else:
+            ui.write(_('\nWrite the introductory message for the '
+                       'patch series.\n\n'))
+            body = ui.edit(body, sender)
+        return body
+
     def getexportmsgs():
         patches = []
 
@@ -285,7 +308,8 @@
         jumbo = []
         msgs = []
 
-        ui.write(_('This patch series consists of %d patches.\n\n') % len(patches))
+        ui.write(_('This patch series consists of %d patches.\n\n')
+                 % len(patches))
 
         for p, i in zip(patches, xrange(len(patches))):
             jumbo.extend(p)
@@ -295,24 +319,18 @@
             tlen = len(str(len(patches)))
 
             subj = '[PATCH %0*d of %d] %s' % (
-                tlen, 0,
-                len(patches),
-                opts['subject'] or
-                prompt('Subject:', rest = ' [PATCH %0*d of %d] ' % (tlen, 0,
-                    len(patches))))
+                tlen, 0, len(patches),
+                opts.get('subject') or
+                prompt('Subject:',
+                       rest=' [PATCH %0*d of %d] ' % (tlen, 0, len(patches))))
 
             body = ''
-            if opts['diffstat']:
+            if opts.get('diffstat'):
                 d = cdiffstat(_('Final summary:\n'), jumbo)
-                if d: body = '\n' + d
+                if d:
+                    body = '\n' + d
 
-            if opts['desc']:
-                body = open(opts['desc']).read()
-            else:
-                ui.write(_('\nWrite the introductory message for the '
-                           'patch series.\n\n'))
-                body = ui.edit(body, sender)
-
+            body = getdescription(body, sender)
             msg = email.MIMEText.MIMEText(body)
             msg['Subject'] = subj
 
@@ -320,11 +338,10 @@
         return msgs
 
     def getbundlemsgs(bundle):
-        subj = (opts['subject']
+        subj = (opts.get('subject')
                 or prompt('Subject:', default='A bundle for your repository'))
-        ui.write(_('\nWrite the introductory message for the bundle.\n\n'))
-        body = ui.edit('', sender)
 
+        body = getdescription('', sender)
         msg = email.MIMEMultipart.MIMEMultipart()
         if body:
             msg.attach(email.MIMEText.MIMEText(body, 'plain'))
@@ -337,7 +354,7 @@
         msg['Subject'] = subj
         return [msg]
 
-    sender = (opts['from'] or ui.config('email', 'from') or
+    sender = (opts.get('from') or ui.config('email', 'from') or
               ui.config('patchbomb', 'from') or
               prompt('From', ui.username()))
 
@@ -347,25 +364,24 @@
         msgs = getexportmsgs()
 
     def getaddrs(opt, prpt, default = None):
-        addrs = opts[opt] or (ui.config('email', opt) or
-                              ui.config('patchbomb', opt) or
-                              prompt(prpt, default = default)).split(',')
+        addrs = opts.get(opt) or (ui.config('email', opt) or
+                                  ui.config('patchbomb', opt) or
+                                  prompt(prpt, default = default)).split(',')
         return [a.strip() for a in addrs if a.strip()]
 
     to = getaddrs('to', 'To')
     cc = getaddrs('cc', 'Cc', '')
 
-    bcc = opts['bcc'] or (ui.config('email', 'bcc') or
+    bcc = opts.get('bcc') or (ui.config('email', 'bcc') or
                           ui.config('patchbomb', 'bcc') or '').split(',')
     bcc = [a.strip() for a in bcc if a.strip()]
 
     ui.write('\n')
 
-    if really_sending:
-        mailer = mail.connect(ui)
     parent = None
 
     sender_addr = email.Utils.parseaddr(sender)[1]
+    sendmail = None
     for m in msgs:
         try:
             m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
@@ -376,14 +392,16 @@
         else:
             parent = m['Message-Id']
         m['Date'] = util.datestr(date=start_time,
-                format="%a, %d %b %Y %H:%M:%S", timezone=True)
+                                 format="%a, %d %b %Y %H:%M:%S", timezone=True)
 
         start_time = (start_time[0] + 1, start_time[1])
         m['From'] = sender
         m['To'] = ', '.join(to)
-        if cc: m['Cc']  = ', '.join(cc)
-        if bcc: m['Bcc'] = ', '.join(bcc)
-        if opts['test']:
+        if cc:
+            m['Cc']  = ', '.join(cc)
+        if bcc:
+            m['Bcc'] = ', '.join(bcc)
+        if opts.get('test'):
             ui.status('Displaying ', m['Subject'], ' ...\n')
             ui.flush()
             if 'PAGER' in os.environ:
@@ -398,25 +416,28 @@
                     raise
             if fp is not ui:
                 fp.close()
-        elif opts['mbox']:
+        elif opts.get('mbox'):
             ui.status('Writing ', m['Subject'], ' ...\n')
-            fp = open(opts['mbox'], m.has_key('In-Reply-To') and 'ab+' or 'wb+')
+            fp = open(opts.get('mbox'), 'In-Reply-To' in m and 'ab+' or 'wb+')
             date = util.datestr(date=start_time,
-                    format='%a %b %d %H:%M:%S %Y', timezone=False)
+                                format='%a %b %d %H:%M:%S %Y', timezone=False)
             fp.write('From %s %s\n' % (sender_addr, date))
             fp.write(m.as_string(0))
             fp.write('\n\n')
             fp.close()
         else:
+            if not sendmail:
+                sendmail = mail.connect(ui)
             ui.status('Sending ', m['Subject'], ' ...\n')
             # Exim does not remove the Bcc field
             del m['Bcc']
-            mailer.sendmail(sender, to + bcc + cc, m.as_string(0))
+            sendmail(sender, to + bcc + cc, m.as_string(0))
 
 cmdtable = {
     "email":
         (patchbomb,
-         [('a', 'attach', None, _('send patches as inline attachments')),
+         [('a', 'attach', None, _('send patches as attachments')),
+          ('i', 'inline', None, _('send patches as inline attachments')),
           ('', 'bcc', [], _('email addresses of blind copy recipients')),
           ('c', 'cc', [], _('email addresses of copy recipients')),
           ('d', 'diffstat', None, _('add diffstat output to messages')),
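Many of the edits above switch opts[...] to opts.get(...) and keep the option -> [email]/[patchbomb] config -> prompt fallback order, with prompt() now simply returning its default when the ui is not interactive. The following standalone sketch shows that fallback order with plain dictionaries and a lambda standing in for Mercurial's ui and config objects (these stand-ins are not patchbomb API):

    def getaddrs(opts, config, promptfn, opt, question, default=None):
        # option value wins; otherwise config; otherwise ask (or use default)
        addrs = opts.get(opt) or (config.get(opt) or
                                  promptfn(question, default) or '').split(',')
        return [a.strip() for a in addrs if a.strip()]

    opts = {'to': [], 'cc': ['carol@example.com']}
    config = {'to': 'alice@example.com, bob@example.com'}
    noprompt = lambda question, default: default   # "non-interactive" prompt

    print(getaddrs(opts, config, noprompt, 'to', 'To'))
    # -> ['alice@example.com', 'bob@example.com']
    print(getaddrs(opts, config, noprompt, 'cc', 'Cc', ''))
    # -> ['carol@example.com']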
--- a/hgext/purge.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/purge.py	Wed Feb 06 19:57:52 2008 -0800
@@ -27,7 +27,7 @@
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 
-from mercurial import hg, util
+from mercurial import hg, util, commands
 from mercurial.i18n import _
 import os
 
@@ -49,6 +49,9 @@
         else:
             ui.write('%s%s' % (name, eol))
 
+    if not force:
+        _check_fs(ui, repo)
+
     directories = []
     files = []
     missing = []
@@ -63,8 +66,6 @@
         elif src == 'f' and f not in repo.dirstate:
             files.append(f)
 
-    _check_missing(ui, repo, missing, force)
-
     directories.sort()
 
     for f in files:
@@ -77,7 +78,7 @@
             ui.note(_('Removing directory %s\n') % f)
             remove(os.rmdir, f)
 
-def _check_missing(ui, repo, missing, force=False):
+def _check_fs(ui, repo):
     """Abort if there is the chance of having problems with name-mangling fs
 
     In a name mangling filesystem (e.g. a case insensitive one)
@@ -85,34 +86,18 @@
     stored in the dirstate. This already confuses the status and
     add commands, but with purge this may cause data loss.
 
-    To prevent this, _check_missing will abort if there are missing
-    files. The force option will let the user skip the check if he
-    knows it is safe.
-
-    Even with the force option this function will check if any of the
-    missing files is still available in the working dir: if so there
-    may be some problem with the underlying filesystem, so it
-    aborts unconditionally."""
-
-    found = [f for f in missing if util.lexists(repo.wjoin(f))]
+    To prevent this, this function will abort if there are uncommitted
+    changes.
+    """
 
-    if found:
-        if not ui.quiet:
-            ui.warn(_("The following tracked files weren't listed by the "
-                      "filesystem, but could still be found:\n"))
-            for f in found:
-                ui.warn("%s\n" % f)
-            if util.checkfolding(repo.path):
-                ui.warn(_("This is probably due to a case-insensitive "
-                          "filesystem\n"))
-        raise util.Abort(_("purging on name mangling filesystems is not "
-                           "yet fully supported"))
-
-    if missing and not force:
-        raise util.Abort(_("there are missing files in the working dir and "
-                           "purge still has problems with them due to name "
-                           "mangling filesystems. "
-                           "Use --force if you know what you are doing"))
+    # We can't use (files, match) to do a partial walk here - we wouldn't
+    # notice a modified README file if the user ran "hg purge readme"
+    modified, added, removed, deleted = repo.status()[:4]
+    if modified or added or removed or deleted:
+        if not util.checkfolding(repo.path) and not ui.quiet:
+            ui.warn(_("Purging on name mangling filesystems is not "
+                      "fully supported.\n"))
+        raise util.Abort(_("outstanding uncommitted changes"))
 
 
 def purge(ui, repo, *dirs, **opts):
@@ -158,11 +143,10 @@
         (purge,
          [('a', 'abort-on-err', None, _('abort if an error occurs')),
           ('',  'all', None, _('purge ignored files too')),
-          ('f', 'force', None, _('purge even when missing files are detected')),
+          ('f', 'force', None, _('purge even when there are uncommitted changes')),
           ('p', 'print', None, _('print the file names instead of deleting them')),
           ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
                                   ' (implies -p)')),
-          ('I', 'include', [], _('include names matching the given patterns')),
-          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+         ] + commands.walkopts,
          _('hg purge [OPTION]... [DIR]...'))
 }
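The rewritten check means purge now refuses to run, unless --force is given, whenever a full repo.status() walk reports any uncommitted change. A minimal sketch of that guard, using a fake repository object in place of Mercurial's localrepository (the class and the exception here are stand-ins, not extension code):

    class FakeRepo(object):
        def status(self):
            # (modified, added, removed, deleted, unknown)
            return (['README'], [], [], [], [])

    def check_clean(repo):
        modified, added, removed, deleted = repo.status()[:4]
        if modified or added or removed or deleted:
            raise RuntimeError('outstanding uncommitted changes')

    try:
        check_clean(FakeRepo())
    except RuntimeError, err:       # Python 2 syntax, matching the code above
        print('purge would abort: %s' % err)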
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/record.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,525 @@
+# record.py
+#
+# Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
+#
+# This software may be used and distributed according to the terms of
+# the GNU General Public License, incorporated herein by reference.
+
+'''interactive change selection during commit or qrefresh'''
+
+from mercurial.i18n import _
+from mercurial import cmdutil, commands, extensions, hg, mdiff, patch, revlog
+from mercurial import util
+import copy, cStringIO, errno, operator, os, re, shutil, tempfile
+
+lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
+
+def scanpatch(fp):
+    """like patch.iterhunks, but yield different events
+
+    - ('file',    [header_lines + fromfile + tofile])
+    - ('context', [context_lines])
+    - ('hunk',    [hunk_lines])
+    - ('range',   (-start,len, +start,len, proc))
+    """
+    lr = patch.linereader(fp)
+
+    def scanwhile(first, p):
+        """scan lr while predicate holds"""
+        lines = [first]
+        while True:
+            line = lr.readline()
+            if not line:
+                break
+            if p(line):
+                lines.append(line)
+            else:
+                lr.push(line)
+                break
+        return lines
+
+    while True:
+        line = lr.readline()
+        if not line:
+            break
+        if line.startswith('diff --git a/'):
+            def notheader(line):
+                s = line.split(None, 1)
+                return not s or s[0] not in ('---', 'diff')
+            header = scanwhile(line, notheader)
+            fromfile = lr.readline()
+            if fromfile.startswith('---'):
+                tofile = lr.readline()
+                header += [fromfile, tofile]
+            else:
+                lr.push(fromfile)
+            yield 'file', header
+        elif line[0] == ' ':
+            yield 'context', scanwhile(line, lambda l: l[0] in ' \\')
+        elif line[0] in '-+':
+            yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\')
+        else:
+            m = lines_re.match(line)
+            if m:
+                yield 'range', m.groups()
+            else:
+                raise patch.PatchError('unknown patch content: %r' % line)
+
+class header(object):
+    """patch header
+    
+    XXX shouldn't we move this to mercurial/patch.py?
+    """
+    diff_re = re.compile('diff --git a/(.*) b/(.*)$')
+    allhunks_re = re.compile('(?:index|new file|deleted file) ')
+    pretty_re = re.compile('(?:new file|deleted file) ')
+    special_re = re.compile('(?:index|new|deleted|copy|rename) ')
+
+    def __init__(self, header):
+        self.header = header
+        self.hunks = []
+
+    def binary(self):
+        for h in self.header:
+            if h.startswith('index '):
+                return True
+
+    def pretty(self, fp):
+        for h in self.header:
+            if h.startswith('index '):
+                fp.write(_('this modifies a binary file (all or nothing)\n'))
+                break
+            if self.pretty_re.match(h):
+                fp.write(h)
+                if self.binary():
+                    fp.write(_('this is a binary file\n'))
+                break
+            if h.startswith('---'):
+                fp.write(_('%d hunks, %d lines changed\n') %
+                         (len(self.hunks),
+                          sum([h.added + h.removed for h in self.hunks])))
+                break
+            fp.write(h)
+
+    def write(self, fp):
+        fp.write(''.join(self.header))
+
+    def allhunks(self):
+        for h in self.header:
+            if self.allhunks_re.match(h):
+                return True
+
+    def files(self):
+        fromfile, tofile = self.diff_re.match(self.header[0]).groups()
+        if fromfile == tofile:
+            return [fromfile]
+        return [fromfile, tofile]
+
+    def filename(self):
+        return self.files()[-1]
+
+    def __repr__(self):
+        return '<header %s>' % (' '.join(map(repr, self.files())))
+
+    def special(self):
+        for h in self.header:
+            if self.special_re.match(h):
+                return True
+
+def countchanges(hunk):
+    """hunk -> (n+,n-)"""
+    add = len([h for h in hunk if h[0] == '+'])
+    rem = len([h for h in hunk if h[0] == '-'])
+    return add, rem
+
+class hunk(object):
+    """patch hunk
+    
+    XXX shouldn't we merge this with patch.hunk?
+    """
+    maxcontext = 3
+
+    def __init__(self, header, fromline, toline, proc, before, hunk, after):
+        def trimcontext(number, lines):
+            delta = len(lines) - self.maxcontext
+            # context trimming is disabled for now, hence the "False" guard
+            if False and delta > 0:
+                return number + delta, lines[:self.maxcontext]
+            return number, lines
+
+        self.header = header
+        self.fromline, self.before = trimcontext(fromline, before)
+        self.toline, self.after = trimcontext(toline, after)
+        self.proc = proc
+        self.hunk = hunk
+        self.added, self.removed = countchanges(self.hunk)
+
+    def write(self, fp):
+        delta = len(self.before) + len(self.after)
+        fromlen = delta + self.removed
+        tolen = delta + self.added
+        fp.write('@@ -%d,%d +%d,%d @@%s\n' %
+                 (self.fromline, fromlen, self.toline, tolen,
+                  self.proc and (' ' + self.proc)))
+        fp.write(''.join(self.before + self.hunk + self.after))
+
+    pretty = write
+
+    def filename(self):
+        return self.header.filename()
+
+    def __repr__(self):
+        return '<hunk %r@%d>' % (self.filename(), self.fromline)
+
+def parsepatch(fp):
+    """patch -> [] of hunks """
+    class parser(object):
+        """patch parsing state machine"""
+        def __init__(self):
+            self.fromline = 0
+            self.toline = 0
+            self.proc = ''
+            self.header = None
+            self.context = []
+            self.before = []
+            self.hunk = []
+            self.stream = []
+
+        def addrange(self, (fromstart, fromend, tostart, toend, proc)):
+            self.fromline = int(fromstart)
+            self.toline = int(tostart)
+            self.proc = proc
+
+        def addcontext(self, context):
+            if self.hunk:
+                h = hunk(self.header, self.fromline, self.toline, self.proc,
+                         self.before, self.hunk, context)
+                self.header.hunks.append(h)
+                self.stream.append(h)
+                self.fromline += len(self.before) + h.removed
+                self.toline += len(self.before) + h.added
+                self.before = []
+                self.hunk = []
+                self.proc = ''
+            self.context = context
+
+        def addhunk(self, hunk):
+            if self.context:
+                self.before = self.context
+                self.context = []
+            self.hunk = hunk
+
+        def newfile(self, hdr):
+            self.addcontext([])
+            h = header(hdr)
+            self.stream.append(h)
+            self.header = h
+
+        def finished(self):
+            self.addcontext([])
+            return self.stream
+
+        transitions = {
+            'file': {'context': addcontext,
+                     'file': newfile,
+                     'hunk': addhunk,
+                     'range': addrange},
+            'context': {'file': newfile,
+                        'hunk': addhunk,
+                        'range': addrange},
+            'hunk': {'context': addcontext,
+                     'file': newfile,
+                     'range': addrange},
+            'range': {'context': addcontext,
+                      'hunk': addhunk},
+            }
+
+    p = parser()
+
+    state = 'context'
+    for newstate, data in scanpatch(fp):
+        try:
+            p.transitions[state][newstate](p, data)
+        except KeyError:
+            raise patch.PatchError('unhandled transition: %s -> %s' %
+                                   (state, newstate))
+        state = newstate
+    return p.finished()
+
+def filterpatch(ui, chunks):
+    """Interactively filter patch chunks into applied-only chunks"""
+    chunks = list(chunks)
+    chunks.reverse()
+    seen = {}
+    def consumefile():
+        """fetch next portion from chunks until a 'header' is seen
+        NB: header == new-file mark
+        """
+        consumed = []
+        while chunks:
+            if isinstance(chunks[-1], header):
+                break
+            else:
+                consumed.append(chunks.pop())
+        return consumed
+
+    resp_all = [None]   # these two are changed from inside prompt,
+    resp_file = [None]  # so they cannot be plain local variables
+    applied = {}        # 'filename' -> [] of chunks
+    def prompt(query):
+        """prompt query, and process base inputs
+        
+        - y/n for the rest of file
+        - y/n for the rest
+        - ? (help)
+        - q (quit)
+
+        else, input is returned to the caller.
+        """
+        if resp_all[0] is not None:
+            return resp_all[0]
+        if resp_file[0] is not None:
+            return resp_file[0]
+        while True:
+            r = (ui.prompt(query + _(' [Ynsfdaq?] '), '(?i)[Ynsfdaq?]?$')
+                 or 'y').lower()
+            if r == '?':
+                c = record.__doc__.find('y - record this change')
+                for l in record.__doc__[c:].splitlines():
+                    if l: ui.write(_(l.strip()), '\n')
+                continue
+            elif r == 's':
+                r = resp_file[0] = 'n'
+            elif r == 'f':
+                r = resp_file[0] = 'y'
+            elif r == 'd':
+                r = resp_all[0] = 'n'
+            elif r == 'a':
+                r = resp_all[0] = 'y'
+            elif r == 'q':
+                raise util.Abort(_('user quit'))
+            return r
+    while chunks:
+        chunk = chunks.pop()
+        if isinstance(chunk, header):
+            # new-file mark
+            resp_file = [None]
+            fixoffset = 0
+            hdr = ''.join(chunk.header)
+            if hdr in seen:
+                consumefile()
+                continue
+            seen[hdr] = True
+            if resp_all[0] is None:
+                chunk.pretty(ui)
+            r = prompt(_('examine changes to %s?') %
+                       _(' and ').join(map(repr, chunk.files())))
+            if r == 'y':
+                applied[chunk.filename()] = [chunk]
+                if chunk.allhunks():
+                    applied[chunk.filename()] += consumefile()
+            else:
+                consumefile()
+        else:
+            # new hunk
+            if resp_file[0] is None and resp_all[0] is None:
+                chunk.pretty(ui)
+            r = prompt(_('record this change to %r?') %
+                       chunk.filename())
+            if r == 'y':
+                if fixoffset:
+                    chunk = copy.copy(chunk)
+                    chunk.toline += fixoffset
+                applied[chunk.filename()].append(chunk)
+            else:
+                fixoffset += chunk.removed - chunk.added
+    return reduce(operator.add, [h for h in applied.itervalues()
+                                 if h[0].special() or len(h) > 1], [])
+
+def record(ui, repo, *pats, **opts):
+    '''interactively select changes to commit
+
+    If a list of files is omitted, all changes reported by "hg status"
+    will be candidates for recording.
+
+    You will be prompted for whether to record changes to each
+    modified file and, for files with multiple changes, for each
+    change individually.  For each query, the following responses are
+    possible:
+
+    y - record this change
+    n - skip this change
+
+    s - skip remaining changes to this file
+    f - record remaining changes to this file
+
+    d - done, skip remaining changes and files
+    a - record all changes to all remaining files
+    q - quit, recording no changes
+
+    ? - display help'''
+
+    def record_committer(ui, repo, pats, opts):
+        commands.commit(ui, repo, *pats, **opts)
+
+    dorecord(ui, repo, record_committer, *pats, **opts)
+
+
+def qrecord(ui, repo, patch, *pats, **opts):
+    '''interactively record a new patch
+
+    see 'hg help qnew' & 'hg help record' for more information and usage
+    '''
+
+    try:
+        mq = extensions.find('mq')
+    except KeyError:
+        raise util.Abort(_("'mq' extension not loaded"))
+
+    def qrecord_committer(ui, repo, pats, opts):
+        mq.new(ui, repo, patch, *pats, **opts)
+
+    opts = opts.copy()
+    opts['force'] = True    # always 'qnew -f'
+    dorecord(ui, repo, qrecord_committer, *pats, **opts)
+
+
+def dorecord(ui, repo, committer, *pats, **opts):
+    if not ui.interactive:
+        raise util.Abort(_('running non-interactively, use commit instead'))
+
+    def recordfunc(ui, repo, files, message, match, opts):
+        """This is generic record driver.
+
+        It's job is to interactively filter local changes, and accordingly
+        prepare working dir into a state, where the job can be delegated to
+        non-interactive commit command such as 'commit' or 'qrefresh'.
+
+        After the actual job is done by non-interactive command, working dir
+        state is restored to original.
+
+        In the end we'll record intresting changes, and everything else will be
+        left in place, so the user can continue his work.
+        """
+        if files:
+            changes = None
+        else:
+            changes = repo.status(files=files, match=match)[:5]
+            modified, added, removed = changes[:3]
+            files = modified + added + removed
+        diffopts = mdiff.diffopts(git=True, nodates=True)
+        fp = cStringIO.StringIO()
+        patch.diff(repo, repo.dirstate.parents()[0], files=files,
+                   match=match, changes=changes, opts=diffopts, fp=fp)
+        fp.seek(0)
+
+        # 1. filter the patch, so we have the subset of it we intend to apply
+        chunks = filterpatch(ui, parsepatch(fp))
+        del fp
+
+        contenders = {}
+        for h in chunks:
+            try: contenders.update(dict.fromkeys(h.files()))
+            except AttributeError: pass
+
+        newfiles = [f for f in files if f in contenders]
+
+        if not newfiles:
+            ui.status(_('no changes to record\n'))
+            return 0
+
+        if changes is None:
+            changes = repo.status(files=newfiles, match=match)[:5]
+        modified = dict.fromkeys(changes[0])
+
+        # 2. backup changed files, so we can restore them in the end
+        backups = {}
+        backupdir = repo.join('record-backups')
+        try:
+            os.mkdir(backupdir)
+        except OSError, err:
+            if err.errno != errno.EEXIST:
+                raise
+        try:
+            # backup continues
+            for f in newfiles:
+                if f not in modified:
+                    continue
+                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
+                                               dir=backupdir)
+                os.close(fd)
+                ui.debug('backup %r as %r\n' % (f, tmpname))
+                util.copyfile(repo.wjoin(f), tmpname)
+                backups[f] = tmpname
+
+            fp = cStringIO.StringIO()
+            for c in chunks:
+                if c.filename() in backups:
+                    c.write(fp)
+            dopatch = fp.tell()
+            fp.seek(0)
+
+            # 3a. apply filtered patch to clean repo  (clean)
+            if backups:
+                hg.revert(repo, repo.dirstate.parents()[0], backups.has_key)
+
+            # 3b. (apply)
+            if dopatch:
+                ui.debug('applying patch\n')
+                ui.debug(fp.getvalue())
+                patch.internalpatch(fp, ui, 1, repo.root)
+            del fp
+
+            # 4. The working directory now matches the filtered patch.
+            #    Time to delegate the job to commit/qrefresh or the like!
+
+            # it is important to chdir to the repo root first -- we will call
+            # a high-level command with a list of pathnames relative to the root
+            cwd = os.getcwd()
+            os.chdir(repo.root)
+            try:
+                committer(ui, repo, newfiles, opts)
+            finally:
+                os.chdir(cwd)
+
+            return 0
+        finally:
+            # 5. finally restore backed-up files
+            try:
+                for realname, tmpname in backups.iteritems():
+                    ui.debug('restoring %r to %r\n' % (tmpname, realname))
+                    util.copyfile(tmpname, repo.wjoin(realname))
+                    os.unlink(tmpname)
+                os.rmdir(backupdir)
+            except OSError:
+                pass
+    return cmdutil.commit(ui, repo, recordfunc, pats, opts)
+
+cmdtable = {
+    "record":
+        (record,
+
+         # add commit options
+         commands.table['^commit|ci'][1],
+
+         _('hg record [OPTION]... [FILE]...')),
+}
+
+
+def extsetup():
+    try:
+        mq = extensions.find('mq')
+    except KeyError:
+        return
+
+    qcmdtable = {
+    "qrecord":
+        (qrecord,
+
+         # add qnew options, except '--force'
+         [opt for opt in mq.cmdtable['qnew'][1] if opt[1] != 'force'],
+
+         _('hg qrecord [OPTION]... PATCH [FILE]...')),
+    }
+
+    cmdtable.update(qcmdtable)
+
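parsepatch() above is a small table-driven state machine: scanpatch() yields (event, data) pairs, and a transitions table maps (current state, event) to a handler method, rejecting any pair it does not know. Here is the same pattern in miniature, with made-up states, events and handlers (none of this is record.py code):

    class Collector(object):
        """Toy handler object; record.py's parser class plays this role."""
        def __init__(self):
            self.out = []
        def newfile(self, data):
            self.out.append('file:' + data)
        def addhunk(self, data):
            self.out.append('hunk:' + data)

    transitions = {
        'start': {'file': Collector.newfile},
        'file':  {'hunk': Collector.addhunk, 'file': Collector.newfile},
        'hunk':  {'hunk': Collector.addhunk, 'file': Collector.newfile},
    }

    def run(events):
        c, state = Collector(), 'start'
        for newstate, data in events:
            try:
                transitions[state][newstate](c, data)
            except KeyError:
                raise ValueError('unhandled transition: %s -> %s'
                                 % (state, newstate))
            state = newstate
        return c.out

    print(run([('file', 'a.txt'), ('hunk', '@@ -1 +1 @@'), ('file', 'b.txt')]))
    # -> ['file:a.txt', 'hunk:@@ -1 +1 @@', 'file:b.txt']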
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/win32mbcs.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,158 @@
+# win32mbcs.py -- MBCS filename support for Mercurial on Windows
+#
+# Copyright (c) 2008 Shun-ichi Goto <shunichi.goto@gmail.com>
+#
+# Version: 0.1
+# Author:  Shun-ichi Goto <shunichi.goto@gmail.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+#
+"""Allow to use shift_jis/big5 filenames on Windows.
+
+There is a well known issue "0x5c problem" on Windows.  It is a
+trouble on handling path name as raw encoded byte sequence of
+problematic encodings like shift_jis or big5.  The primary intent
+of this extension is to allow using such a encoding on Mercurial
+without strange file operation error.
+
+By enabling this extension, hook mechanism is activated and some
+functions are altered.  Usually, this encoding is your local encoding
+on your system by default. So you can get benefit simply by enabling
+this extension.
+
+The encoding for filename is same one for terminal by default.  You
+can change the encoding by setting HGENCODING environment variable.
+
+This extension is usefull for:
+ * Japanese Windows user using shift_jis encoding.
+ * Chinese Windows user using big5 encoding.
+ * Users who want to use a repository created with such a encoding.
+
+Note: Unix people does not need to use this extension.
+
+"""
+
+import os
+from mercurial.i18n import _
+from mercurial import util
+
+__all__ = ['install', 'uninstall', 'reposetup']
+
+
+# codec and alias names of sjis and big5 to be faked.
+_problematic_encodings = util.frozenset([
+        'big5', 'big5-tw', 'csbig5',
+        'big5hkscs', 'big5-hkscs', 'hkscs',
+        'cp932', '932', 'ms932', 'mskanji', 'ms-kanji',
+        'shift_jis', 'csshiftjis', 'shiftjis', 'sjis', 's_jis',
+        'shift_jis_2004', 'shiftjis2004', 'sjis_2004', 'sjis2004',
+        'shift_jisx0213', 'shiftjisx0213', 'sjisx0213', 's_jisx0213',
+        ])
+
+# attribute name to store original function
+_ORIGINAL = '_original'
+
+_ui = None
+
+def decode_with_check(arg):
+    if isinstance(arg, tuple):
+        return tuple(map(decode_with_check, arg))
+    elif isinstance(arg, list):
+        return map(decode_with_check, arg)
+    elif isinstance(arg, str):
+        uarg = arg.decode(util._encoding)
+        if arg == uarg.encode(util._encoding):
+            return uarg
+        else:
+            raise UnicodeError("Not local encoding")
+    else:
+        return arg
+
+def encode_with_check(arg):
+    if isinstance(arg, tuple):
+        return tuple(map(encode_with_check, arg))
+    elif isinstance(arg, list):
+        return map(encode_with_check, arg)
+    elif isinstance(arg, unicode):
+        ret = arg.encode(util._encoding)
+        return ret
+    else:
+        return arg
+
+def wrap(func):
+    
+    def wrapped(*args):
+        # check argument is unicode, then call original
+        for arg in args:
+            if isinstance(arg, unicode):
+                return func(*args)
+        # make decoded argument list into uargs
+        try:
+            args = decode_with_check(args)
+        except UnicodeError, exc:
+            # If the arguments are not encoded in the local encoding,
+            # report it and fall back to calling the original function.
+            _ui.warn(_("WARNING: [win32mbcs] filename conversion failed with"
+                       " %s encoding: '%s'\n") % (util._encoding, args))
+            return func(*args)
+        # call as unicode operation, then return with encoding
+        return encode_with_check(func(*args))
+
+    # fake is only for relevant environment.
+    if hasattr(func, _ORIGINAL) or \
+            util._encoding.lower() not in _problematic_encodings:
+        return func
+    else:
+        f = wrapped
+        f.__name__ = func.__name__
+        setattr(f, _ORIGINAL, func)   # hold original to restore
+        return f
+
+def unwrap(func):
+    return getattr(func, _ORIGINAL, func)
+
+def install():
+    # wrap some python functions and mercurial functions
+    # to handle raw bytes on Windows.
+    # NOTE: dirname and basename is safe because they use result
+    # of os.path.split()
+    global _ui
+    if not _ui:
+        from mercurial import ui
+        _ui = ui.ui()
+    os.path.join = wrap(os.path.join)
+    os.path.split = wrap(os.path.split) 
+    os.path.splitext = wrap(os.path.splitext)
+    os.path.splitunc = wrap(os.path.splitunc)
+    os.path.normpath = wrap(os.path.normpath)
+    os.path.normcase = wrap(os.path.normcase)
+    os.makedirs = wrap(os.makedirs)
+    util.endswithsep = wrap(util.endswithsep)
+    util.splitpath = wrap(util.splitpath)
+
+def uninstall():
+    # restore original functions.
+    os.path.join = unwrap(os.path.join)
+    os.path.split = unwrap(os.path.split) 
+    os.path.splitext = unwrap(os.path.splitext)
+    os.path.splitunc = unwrap(os.path.splitunc)
+    os.path.normpath = unwrap(os.path.normpath)
+    os.path.normcase = unwrap(os.path.normcase)
+    os.makedirs = unwrap(os.makedirs)
+    util.endswithsep = unwrap(util.endswithsep)
+    util.splitpath = unwrap(util.splitpath)
+
+
+def reposetup(ui, repo):
+    # TODO: decide use of config section for this extension
+    global _ui
+    _ui = ui
+    if not os.path.supports_unicode_filenames:
+        ui.warn(_("[win32mbcs] cannot activate on this platform.\n"))
+        return
+    # install features of this extension
+    install()
+    ui.debug(_("[win32mbcs] activeted with encoding: %s\n") % util._encoding)
+
+# win32mbcs.py ends here
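wrap()/unwrap() above follow a decorate-and-remember pattern: the original callable is stashed on an attribute of the wrapper so the monkey-patching can be undone later. A hedged sketch of the same idea, hard-coding UTF-8 where the extension uses the local encoding (util._encoding) and handling only flat string arguments rather than the nested tuples and lists the extension also converts:

    _ORIGINAL = '_original'

    def wrap(func, encoding='utf-8'):
        if hasattr(func, _ORIGINAL):
            return func                       # already wrapped
        def wrapped(*args):
            # decode byte-string arguments, call the original, re-encode
            uargs = tuple(a.decode(encoding) if isinstance(a, str) else a
                          for a in args)
            res = func(*uargs)
            if isinstance(res, unicode):      # Python 2, like the code above
                res = res.encode(encoding)
            return res
        setattr(wrapped, _ORIGINAL, func)
        return wrapped

    def unwrap(func):
        return getattr(func, _ORIGINAL, func)

    import os.path
    os.path.join = wrap(os.path.join)
    print(os.path.join('some', 'file.txt'))   # round-trips through unicode
    os.path.join = unwrap(os.path.join)       # restore the original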
--- a/hgext/win32text.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgext/win32text.py	Wed Feb 06 19:57:52 2008 -0800
@@ -1,22 +1,46 @@
+# win32text.py - LF <-> CRLF translation utilities for Windows users
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+#
+# To perform automatic newline conversion, use:
+#
+# [extensions]
+# hgext.win32text =
+# [encode]
+# ** = cleverencode:
+# [decode]
+# ** = cleverdecode:
+#
+# If not doing conversion, to make sure you do not commit CRLF by accident:
+#
+# [hooks]
+# pretxncommit.crlf = python:hgext.win32text.forbidcrlf
+#
+# To do the same check on a server to prevent CRLF from being pushed or pulled:
+#
+# [hooks]
+# pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
+
 from mercurial import util, ui
 from mercurial.i18n import gettext as _
+from mercurial.node import *
 import re
 
 # regexp for single LF without CR preceding.
 re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
 
-def dumbdecode(s, cmd):
+def dumbdecode(s, cmd, ui=None, repo=None, filename=None, **kwargs):
     # warn if already has CRLF in repository.
     # it might cause unexpected eol conversion.
     # see issue 302:
     #   http://www.selenic.com/mercurial/bts/issue302
-    if '\r\n' in s:
-        u = ui.ui()
-        u.warn(_('WARNING: file in repository already has CRLF line ending \n'
-                 ' which does not need eol conversion by win32text plugin.\n'
-                 ' Please reconsider encode/decode setting in'
-                 ' mercurial.ini or .hg/hgrc\n'
-                 ' before next commit.\n'))
+    if '\r\n' in s and ui and filename and repo:
+        ui.warn(_('WARNING: %s already has CRLF line endings\n'
+                  'and does not need EOL conversion by the win32text plugin.\n'
+                  'Before your next commit, please reconsider your '
+                  'encode/decode settings in\nMercurial.ini or %s.\n') %
+                (filename, repo.join('hgrc')))
     # replace single LF to CRLF
     return re_single_lf.sub('\\1\r\n', s)
 
@@ -27,9 +51,9 @@
     if '\0' in s: return False
     return True
 
-def cleverdecode(s, cmd):
+def cleverdecode(s, cmd, **kwargs):
     if clevertest(s, cmd):
-        return dumbdecode(s, cmd)
+        return dumbdecode(s, cmd, **kwargs)
     return s
 
 def cleverencode(s, cmd):
@@ -37,9 +61,47 @@
         return dumbencode(s, cmd)
     return s
 
-util.filtertable.update({
+_filters = {
     'dumbdecode:': dumbdecode,
     'dumbencode:': dumbencode,
     'cleverdecode:': cleverdecode,
     'cleverencode:': cleverencode,
-    })
+    }
+
+def forbidcrlf(ui, repo, hooktype, node, **kwargs):
+    halt = False
+    for rev in xrange(repo.changelog.rev(bin(node)), repo.changelog.count()):
+        c = repo.changectx(rev)
+        for f in c.files():
+            if f not in c:
+                continue
+            data = c[f].data()
+            if '\0' not in data and '\r\n' in data:
+                if not halt:
+                    ui.warn(_('Attempt to commit or push text file(s) '
+                              'using CRLF line endings\n'))
+                ui.warn(_('in %s: %s\n') % (short(c.node()), f))
+                halt = True
+    if halt and hooktype == 'pretxnchangegroup':
+        ui.warn(_('\nTo prevent this mistake in your local repository,\n'
+                  'add to Mercurial.ini or .hg/hgrc:\n'
+                  '\n'
+                  '[hooks]\n'
+                  'pretxncommit.crlf = python:hgext.win32text.forbidcrlf\n'
+                  '\n'
+                  'and also consider adding:\n'
+                  '\n'
+                  '[extensions]\n'
+                  'hgext.win32text =\n'
+                  '[encode]\n'
+                  '** = cleverencode:\n'
+                  '[decode]\n'
+                  '** = cleverdecode:\n'))
+    return halt
+
+def reposetup(ui, repo):
+    if not repo.local():
+        return
+    for name, fn in _filters.iteritems():
+        repo.adddatafilter(name, fn)
+
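The heart of the decode side is the re_single_lf pattern: it inserts a CR before any LF that is not already preceded by one, leaving existing CRLF pairs untouched (dumbencode, outside this hunk, performs the opposite conversion for storage). A quick standalone check of that behaviour:

    import re

    re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)

    def to_crlf(s):
        return re_single_lf.sub('\\1\r\n', s)

    print(repr(to_crlf('one\ntwo\r\nthree\n')))
    # -> 'one\r\ntwo\r\nthree\r\n'  (the existing CRLF is left alone)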
--- a/hgmerge	Thu Jul 26 07:56:27 2007 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,202 +0,0 @@
-#!/bin/sh
-#
-# hgmerge - default merge helper for Mercurial
-#
-# This tries to find a way to do three-way merge on the current system.
-# The result ought to end up in $1.  Script is run in root directory of
-# repository.
-#
-# Environment variables set by Mercurial:
-# HG_FILE            name of file within repo
-# HG_MY_NODE         revision being merged
-# HG_OTHER_NODE      revision being merged
-
-set -e # bail out quickly on failure
-
-LOCAL="$1"
-BASE="$2"
-OTHER="$3"
-
-if [ -z "$EDITOR" ]; then
-    EDITOR="vi"
-fi
-
-# find decent versions of our utilities, insisting on the GNU versions where we
-# need to
-MERGE="merge"
-DIFF3="gdiff3"
-DIFF="gdiff"
-PATCH="gpatch"
-
-type "$MERGE" >/dev/null 2>&1 || MERGE=
-type "$DIFF3" >/dev/null 2>&1 || DIFF3="diff3"
-$DIFF3 --version >/dev/null 2>&1 || DIFF3=
-type "$DIFF"  >/dev/null 2>&1 || DIFF="diff"
-type "$DIFF"  >/dev/null 2>&1 || DIFF=
-type "$PATCH" >/dev/null 2>&1 || PATCH="patch"
-type "$PATCH" >/dev/null 2>&1 || PATCH=
-
-# find optional visual utilities
-FILEMERGE="/Developer/Applications/Utilities/FileMerge.app/Contents/MacOS/FileMerge"
-KDIFF3="kdiff3"
-TKDIFF="tkdiff"
-MELD="meld"
-
-type "$FILEMERGE" >/dev/null 2>&1 || FILEMERGE=
-type "$KDIFF3"    >/dev/null 2>&1 || KDIFF3=
-type "$TKDIFF"    >/dev/null 2>&1 || TKDIFF=
-type "$MELD"      >/dev/null 2>&1 || MELD=
-
-# Hack for Solaris
-TEST="/usr/bin/test"
-type "$TEST" >/dev/null 2>&1 || TEST="/bin/test"
-type "$TEST" >/dev/null 2>&1 || TEST="test"
-
-# random part of names
-RAND="$RANDOM$RANDOM"
-
-# temporary directory for diff+patch merge
-HGTMP="${TMPDIR-/tmp}/hgmerge.$RAND"
-
-# backup file
-BACKUP="$LOCAL.orig.$RAND"
-
-# file used to test for file change
-CHGTEST="$LOCAL.chg.$RAND"
-
-# put all your required cleanup here
-cleanup() {
-    rm -f "$BACKUP" "$CHGTEST"
-    rm -rf "$HGTMP"
-}
-
-# functions concerning program exit
-success() {
-    cleanup
-    exit 0
-}
-
-failure() {
-    echo "merge failed" 1>&2
-    mv "$BACKUP" "$LOCAL"
-    cleanup
-    exit 1
-}
-
-# Ask if the merge was successful
-ask_if_merged() {
-    while true; do
-        echo "$LOCAL seems unchanged."
-        echo "Was the merge successful? [y/n]"
-        read answer
-        case "$answer" in
-            y*|Y*) success;;
-            n*|N*) failure;;
-        esac
-    done
-}
-
-# Check if conflict markers are present and ask if the merge was successful
-conflicts_or_success() {
-    while egrep '^(<<<<<<< .*|=======|>>>>>>> .*)$' "$LOCAL" >/dev/null; do
-        echo "$LOCAL contains conflict markers."
-        echo "Keep this version? [y/n]"
-        read answer
-        case "$answer" in
-            y*|Y*) success;;
-            n*|N*) failure;;
-        esac
-    done
-    success
-}
-
-# Clean up when interrupted
-trap "failure" 1 2 3 6 15 # HUP INT QUIT ABRT TERM
-
-# Back up our file (and try hard to keep the mtime unchanged)
-mv "$LOCAL" "$BACKUP"
-cp "$BACKUP" "$LOCAL"
-
-# Attempt to do a non-interactive merge
-if [ -n "$MERGE" -o -n "$DIFF3" ]; then
-    if [ -n "$MERGE" ]; then
-        $MERGE "$LOCAL" "$BASE" "$OTHER" 2> /dev/null && success
-    elif [ -n "$DIFF3" ]; then
-        $DIFF3 -m "$BACKUP" "$BASE" "$OTHER" > "$LOCAL" && success
-    fi
-    if [ $? -gt 1 ]; then
-        echo "automatic merge failed! Exiting." 1>&2
-        failure
-    fi
-fi
-
-# on MacOS X try FileMerge.app, shipped with Apple's developer tools
-if [ -n "$FILEMERGE" ]; then
-    cp "$BACKUP" "$LOCAL"
-    cp "$BACKUP" "$CHGTEST"
-    # filemerge prefers the right by default
-    $FILEMERGE -left "$OTHER" -right "$LOCAL" -ancestor "$BASE" -merge "$LOCAL"
-    [ $? -ne 0 ] && echo "FileMerge failed to launch" && failure
-    $TEST "$LOCAL" -nt "$CHGTEST" && conflicts_or_success || ask_if_merged
-fi
-
-if [ -n "$DISPLAY" ]; then
-    # try using kdiff3, which is fairly nice
-    if [ -n "$KDIFF3" ]; then
-        $KDIFF3 --auto "$BASE" "$BACKUP" "$OTHER" -o "$LOCAL" || failure
-        conflicts_or_success
-    fi
-
-    # try using tkdiff, which is a bit less sophisticated
-    if [ -n "$TKDIFF" ]; then
-        $TKDIFF "$BACKUP" "$OTHER" -a "$BASE" -o "$LOCAL" || failure
-        conflicts_or_success
-    fi
-
-    if [ -n "$MELD" ]; then
-        cp "$BACKUP" "$CHGTEST"
-        # protect our feet - meld allows us to save to the left file
-        cp "$BACKUP" "$LOCAL.tmp.$RAND"
-        # Meld doesn't have automatic merging, so to reduce intervention
-        # use the file with conflicts
-        $MELD "$LOCAL.tmp.$RAND" "$LOCAL" "$OTHER" || failure
-        # Also it doesn't return good error code
-        $TEST "$LOCAL" -nt "$CHGTEST" && conflicts_or_success || ask_if_merged
-    fi
-fi
-
-# Attempt to do a merge with $EDITOR
-if [ -n "$MERGE" -o -n "$DIFF3" ]; then
-    echo "conflicts detected in $LOCAL"
-    cp "$BACKUP" "$CHGTEST"
-    $EDITOR "$LOCAL" || failure
-    # Some editors do not return meaningful error codes
-    # Do not take any chances
-    $TEST "$LOCAL" -nt "$CHGTEST" && conflicts_or_success || ask_if_merged
-fi
-
-# attempt to manually merge with diff and patch
-if [ -n "$DIFF" -a -n "$PATCH" ]; then
-
-    (umask 077 && mkdir "$HGTMP") || {
-        echo "Could not create temporary directory $HGTMP" 1>&2
-        failure
-    }
-
-    $DIFF -u "$BASE" "$OTHER" > "$HGTMP/diff" || :
-    if $PATCH "$LOCAL" < "$HGTMP/diff"; then
-        success
-    else
-        # If rejects are empty after using the editor, merge was ok
-        $EDITOR "$LOCAL" "$LOCAL.rej" || failure
-        $TEST -s "$LOCAL.rej" || success
-    fi
-    failure
-fi
-
-echo
-echo "hgmerge: unable to find any merge utility!"
-echo "supported programs:"
-echo "merge, FileMerge, tkdiff, kdiff3, meld, diff+patch"
-echo
-failure
--- a/hgweb.cgi	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgweb.cgi	Wed Feb 06 19:57:52 2008 -0800
@@ -2,14 +2,17 @@
 #
 # An example CGI script to use hgweb, edit as necessary
 
+# adjust python path if not a system-wide install:
+#import sys
+#sys.path.insert(0, "/path/to/python/lib")
+
+# enable importing on demand to reduce startup time
+from mercurial import demandimport; demandimport.enable()
+
 # send python tracebacks to the browser if an error occurs:
 import cgitb
 cgitb.enable()
 
-# adjust python path if not a system-wide install:
-#import sys
-#sys.path.insert(0, "/path/to/python/lib")
-
 # If you'd like to serve pages with UTF-8 instead of your default
 # locale charset, you can do so by uncommenting the following lines.
 # Note that this will cause your .hgrc files to be interpreted in
@@ -19,10 +22,9 @@
 #os.environ["HGENCODING"] = "UTF-8"
 
 from mercurial.hgweb.hgweb_mod import hgweb
-from mercurial.hgweb.request import wsgiapplication
+from mercurial import dispatch, ui
 import mercurial.hgweb.wsgicgi as wsgicgi
 
-def make_web_app():
-    return hgweb("/path/to/repo", "repository name")
-
-wsgicgi.launch(wsgiapplication(make_web_app))
+u = ui.ui(report_untrusted=False, interactive=False)
+dispatch.profiled(u, lambda: wsgicgi.launch(hgweb("/path/to/repo",
+                                                  "repository name", u)))
--- a/hgwebdir.cgi	Thu Jul 26 07:56:27 2007 -0400
+++ b/hgwebdir.cgi	Wed Feb 06 19:57:52 2008 -0800
@@ -2,14 +2,17 @@
 #
 # An example CGI script to export multiple hgweb repos, edit as necessary
 
+# adjust python path if not a system-wide install:
+#import sys
+#sys.path.insert(0, "/path/to/python/lib")
+
+# enable importing on demand to reduce startup time
+from mercurial import demandimport; demandimport.enable()
+
 # send python tracebacks to the browser if an error occurs:
 import cgitb
 cgitb.enable()
 
-# adjust python path if not a system-wide install:
-#import sys
-#sys.path.insert(0, "/path/to/python/lib")
-
 # If you'd like to serve pages with UTF-8 instead of your default
 # locale charset, you can do so by uncommenting the following lines.
 # Note that this will cause your .hgrc files to be interpreted in
@@ -19,7 +22,7 @@
 #os.environ["HGENCODING"] = "UTF-8"
 
 from mercurial.hgweb.hgwebdir_mod import hgwebdir
-from mercurial.hgweb.request import wsgiapplication
+from mercurial import dispatch, ui
 import mercurial.hgweb.wsgicgi as wsgicgi
 
 # The config file looks like this.  You can have paths to individual
@@ -41,7 +44,5 @@
 # Alternatively you can pass a list of ('virtual/path', '/real/path') tuples
 # or use a dictionary with entries like 'virtual/path': '/real/path'
 
-def make_web_app():
-    return hgwebdir("hgweb.config")
-
-wsgicgi.launch(wsgiapplication(make_web_app))
+u = ui.ui(report_untrusted=False, interactive=False)
+dispatch.profiled(u, lambda: wsgicgi.launch(hgwebdir('hgweb.config', u)))
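
The retained comments note that hgwebdir also accepts a list of ('virtual/path', '/real/path') tuples or a dictionary instead of a config file name. A sketch of the dictionary form, with placeholder paths; this variant is an illustration, not part of the changeset.

# Variant of the script above that passes the repositories directly
# instead of naming an hgweb.config file; paths are placeholders.
from mercurial import demandimport; demandimport.enable()

import cgitb
cgitb.enable()

from mercurial.hgweb.hgwebdir_mod import hgwebdir
from mercurial import dispatch, ui
import mercurial.hgweb.wsgicgi as wsgicgi

repos = {
    'projects/alpha': '/var/hg/alpha',   # served under <script>/projects/alpha
    'projects/beta': '/var/hg/beta',
}

u = ui.ui(report_untrusted=False, interactive=False)
dispatch.profiled(u, lambda: wsgicgi.launch(hgwebdir(repos, u)))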
--- a/mercurial/archival.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/archival.py	Wed Feb 06 19:57:52 2008 -0800
@@ -15,7 +15,7 @@
     safe for consumers.'''
 
     if prefix:
-        prefix = prefix.replace('\\', '/')
+        prefix = util.normpath(prefix)
     else:
         if not isinstance(dest, str):
             raise ValueError('dest must be string if no prefix')
@@ -208,6 +208,8 @@
         archiver.addfile(name, mode, islink, data)
 
     ctx = repo.changectx(node)
+    if kind not in archivers:
+        raise util.Abort(_("unknown archive type '%s'" % kind))
     archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0])
     m = ctx.manifest()
     items = m.items()
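
The added check turns a request for an unsupported archive type into a clean abort message instead of a KeyError traceback. The pattern is a plain look-before-you-dispatch guard over a registry dict; a generic sketch with hypothetical names, independent of the Mercurial API.

# Generic form of the guard added above: validate the requested kind
# against the registry before dispatching to its factory.
class UnknownKind(Exception):
    pass

archivers = {}                     # kind -> factory callable

def register(kind, factory):
    archivers[kind] = factory

def make_archiver(kind, *args):
    if kind not in archivers:
        raise UnknownKind("unknown archive type '%s'" % kind)
    return archivers[kind](*args)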
--- a/mercurial/bdiff.c	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/bdiff.c	Wed Feb 06 19:57:52 2008 -0800
@@ -12,6 +12,7 @@
 #include <Python.h>
 #include <stdlib.h>
 #include <string.h>
+#include <limits.h>
 
 #if defined __hpux || defined __SUNPRO_C || defined _AIX
 # define inline
@@ -58,21 +59,17 @@
 	struct hunk *base, *head;
 };
 
-static inline uint32_t rol32(uint32_t word, unsigned int shift)
-{
-        return (word << shift) | (word >> (32 - shift));
-}
-
 int splitlines(const char *a, int len, struct line **lr)
 {
-	int g, h, i;
+	int h, i;
 	const char *p, *b = a;
+	const char * const plast = a + len - 1;
 	struct line *l;
 
 	/* count the lines */
 	i = 1; /* extra line for sentinel */
 	for (p = a; p < a + len; p++)
-		if (*p == '\n' || p == a + len - 1)
+		if (*p == '\n' || p == plast)
 			i++;
 
 	*lr = l = (struct line *)malloc(sizeof(struct line) * i);
@@ -82,24 +79,17 @@
 	/* build the line array and calculate hashes */
 	h = 0;
 	for (p = a; p < a + len; p++) {
-		/*
-		 * a simple hash from GNU diff, with better collision
-		 * resistance from hashpjw. this slows down common
-		 * case by 10%, but speeds up worst case by 100x.
-		 */
-		h = *p + rol32(h, 7);
-		if ((g = h & 0xf0000000)) {
-			h ^= g >> 24;
-			h ^= g;
-		}
-		if (*p == '\n' || p == a + len - 1) {
+		/* Leonid Yuriev's hash */
+		h = (h * 1664525) + *p + 1013904223;
+
+		if (*p == '\n' || p == plast) {
+			l->h = h;
+			h = 0;
 			l->len = p - b + 1;
-			l->h = h * l->len;
 			l->l = b;
-			l->n = -1;
+			l->n = INT_MAX;
 			l++;
 			b = p + 1;
-			h = 0;
 		}
 	}
 
@@ -116,28 +106,35 @@
 
 static int equatelines(struct line *a, int an, struct line *b, int bn)
 {
-	int i, j, buckets = 1, t;
-	struct pos *h;
+	int i, j, buckets = 1, t, scale;
+	struct pos *h = NULL;
 
 	/* build a hash table of the next highest power of 2 */
 	while (buckets < bn + 1)
 		buckets *= 2;
 
-	h = (struct pos *)malloc(buckets * sizeof(struct pos));
-	buckets = buckets - 1;
+	/* try to allocate a large hash table to avoid collisions */
+	for (scale = 4; scale; scale /= 2) {
+		h = (struct pos *)malloc(scale * buckets * sizeof(struct pos));
+		if (h)
+			break;
+	}
+
 	if (!h)
 		return 0;
 
+	buckets = buckets * scale - 1;
+
 	/* clear the hash table */
 	for (i = 0; i <= buckets; i++) {
-		h[i].pos = -1;
+		h[i].pos = INT_MAX;
 		h[i].len = 0;
 	}
 
 	/* add lines to the hash table chains */
 	for (i = bn - 1; i >= 0; i--) {
 		/* find the equivalence class */
-		for (j = b[i].h & buckets; h[j].pos != -1;
+		for (j = b[i].h & buckets; h[j].pos != INT_MAX;
 		     j = (j + 1) & buckets)
 			if (!cmp(b + i, b + h[j].pos))
 				break;
@@ -150,12 +147,12 @@
 	}
 
 	/* compute popularity threshold */
-	t = (bn >= 200) ? bn / 100 : bn + 1;
+	t = (bn >= 4000) ? bn / 1000 : bn + 1;
 
 	/* match items in a to their equivalence class in b */
 	for (i = 0; i < an; i++) {
 		/* find the equivalence class */
-		for (j = a[i].h & buckets; h[j].pos != -1;
+		for (j = a[i].h & buckets; h[j].pos != INT_MAX;
 		     j = (j + 1) & buckets)
 			if (!cmp(a + i, b + h[j].pos))
 				break;
@@ -164,7 +161,7 @@
 		if (h[j].len <= t)
 			a[i].n = h[j].pos; /* point to head of match list */
 		else
-			a[i].n = -1; /* too popular */
+			a[i].n = INT_MAX; /* too popular */
 	}
 
 	/* discard hash tables */
@@ -179,11 +176,11 @@
 
 	for (i = a1; i < a2; i++) {
 		/* skip things before the current block */
-		for (j = a[i].n; j != -1 && j < b1; j = b[j].n)
+		for (j = a[i].n; j < b1; j = b[j].n)
 			;
 
 		/* loop through all lines match a[i] in b */
-		for (; j != -1 && j < b2; j = b[j].n) {
+		for (; j < b2; j = b[j].n) {
 			/* does this extend an earlier match? */
 			if (i > a1 && j > b1 && pos[j - 1].pos == i - 1)
 				k = pos[j - 1].len + 1;
@@ -216,6 +213,7 @@
 
 	*omi = mi - mb;
 	*omj = mj - mb;
+
 	return mk + mb;
 }
 
@@ -247,7 +245,7 @@
 
 	/* allocate and fill arrays */
 	t = equatelines(a, an, b, bn);
-	pos = (struct pos *)calloc(bn, sizeof(struct pos));
+	pos = (struct pos *)calloc(bn ? bn : 1, sizeof(struct pos));
 	/* we can't have more matches than lines in the shorter file */
 	l.head = l.base = (struct hunk *)malloc(sizeof(struct hunk) *
 	                                        ((an<bn ? an:bn) + 1));
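
The rewritten splitlines() drops the rotate-and-xor hash in favour of Leonid Yuriev's multiplicative hash (the LCG constants 1664525 and 1013904223) and switches the "no match" sentinel from -1 to INT_MAX so the later loops can use plain < comparisons. A small Python rendering of the per-line hashing, for illustration only; the mask approximates the 32-bit wraparound of the C int arithmetic.

# Python rendering of the per-line hash computed in splitlines() above:
# accumulate byte by byte and reset at each newline (or at end of input).
def line_hashes(data):
    """Return a list of (line, hash) pairs."""
    lines = []
    h = 0
    start = 0
    for i, ch in enumerate(data):
        h = (h * 1664525 + ord(ch) + 1013904223) & 0xffffffff
        if ch == '\n' or i == len(data) - 1:
            lines.append((data[start:i + 1], h))
            h = 0
            start = i + 1
    return lines

for line, h in line_hashes("one\ntwo\nthree"):
    print("%08x %r" % (h, line))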
--- a/mercurial/bundlerepo.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/bundlerepo.py	Wed Feb 06 19:57:52 2008 -0800
@@ -48,7 +48,7 @@
                 continue
             for p in (p1, p2):
                 if not p in self.nodemap:
-                    raise revlog.LookupError(_("unknown parent %s") % short(p1))
+                    raise revlog.LookupError(hex(p1), _("unknown parent %s") % short(p1))
             if linkmapper is None:
                 link = n
             else:
@@ -56,8 +56,8 @@
 
             if not prev:
                 prev = p1
-            # start, size, base is not used, link, p1, p2, delta ref
-            e = (revlog.offset_type(start, 0), size, -1, None, link,
+            # start, size, full uncompressed size, base (unused), link, p1, p2, node
+            e = (revlog.offset_type(start, 0), size, -1, -1, link,
                  self.rev(p1), self.rev(p2), node)
             self.basemap[n] = prev
             self.index.insert(-1, e)
@@ -193,18 +193,27 @@
         else:
             raise util.Abort(_("%s: unknown bundle compression type")
                              % bundlename)
-        self.changelog = bundlechangelog(self.sopener, self.bundlefile)
-        self.manifest = bundlemanifest(self.sopener, self.bundlefile,
-                                       self.changelog.rev)
         # dict with the mapping 'filename' -> position in the bundle
         self.bundlefilespos = {}
-        while 1:
-            f = changegroup.getchunk(self.bundlefile)
-            if not f:
-                break
-            self.bundlefilespos[f] = self.bundlefile.tell()
-            for c in changegroup.chunkiter(self.bundlefile):
-                pass
+
+    def __getattr__(self, name):
+        if name == 'changelog':
+            self.changelog = bundlechangelog(self.sopener, self.bundlefile)
+            self.manstart = self.bundlefile.tell()
+            return self.changelog
+        if name == 'manifest':
+            self.bundlefile.seek(self.manstart)
+            self.manifest = bundlemanifest(self.sopener, self.bundlefile,
+                                           self.changelog.rev)
+            self.filestart = self.bundlefile.tell()
+            return self.manifest
+        if name == 'manstart':
+            self.changelog  # loading the changelog also sets self.manstart
+            return self.manstart
+        if name == 'filestart':
+            self.manifest  # loading the manifest also sets self.filestart
+            return self.filestart
+        return localrepo.localrepository.__getattr__(self, name)
 
     def url(self):
         return self._url
@@ -213,6 +222,16 @@
         return -1
 
     def file(self, f):
+        if not self.bundlefilespos:
+            self.bundlefile.seek(self.filestart)
+            while 1:
+                chunk = changegroup.getchunk(self.bundlefile)
+                if not chunk:
+                    break
+                self.bundlefilespos[chunk] = self.bundlefile.tell()
+                for c in changegroup.chunkiter(self.bundlefile):
+                    pass
+
         if f[0] == '/':
             f = f[1:]
         if f in self.bundlefilespos:
@@ -237,14 +256,25 @@
 def instance(ui, path, create):
     if create:
         raise util.Abort(_('cannot create new bundle repository'))
+    parentpath = ui.config("bundle", "mainreporoot", "")
+    if parentpath:
+        # Try to make the full path relative so we get a nice, short URL.
+        # In particular, we don't want temp dir names in test outputs.
+        cwd = os.getcwd()
+        if parentpath == cwd:
+            parentpath = ''
+        else:
+            cwd = os.path.join(cwd, '')
+            if parentpath.startswith(cwd):
+                parentpath = parentpath[len(cwd):]
     path = util.drop_scheme('file', path)
     if path.startswith('bundle:'):
         path = util.drop_scheme('bundle', path)
         s = path.split("+", 1)
         if len(s) == 1:
-            repopath, bundlename = "", s[0]
+            repopath, bundlename = parentpath, s[0]
         else:
             repopath, bundlename = s
     else:
-        repopath, bundlename = '', path
+        repopath, bundlename = parentpath, path
     return bundlerepository(ui, repopath, bundlename)
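
bundlerepository no longer parses the changelog, manifest and file sections of the bundle up front; __getattr__ materialises each piece the first time it is asked for and records where the next section starts. The same lazy-attribute idiom in isolation, with hypothetical names.

# Stand-alone sketch of the lazy-attribute idiom used above: compute an
# attribute on first access, cache it on the instance, and remember the
# offset where the next section begins.
class LazySections(object):
    def __init__(self, path):
        self.path = path

    def __getattr__(self, name):
        # only reached when normal attribute lookup fails, i.e. before caching
        if name == 'header':
            f = open(self.path, 'rb')
            self.header = f.readline()
            self.bodystart = f.tell()
            f.close()
            return self.header
        if name == 'bodystart':
            self.header              # forces the header (and bodystart) to load
            return self.bodystart
        raise AttributeError(name)

Once the attribute has been assigned, ordinary lookup finds it and __getattr__ is not consulted again, which is what makes the caching free after the first access.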
--- a/mercurial/byterange.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/byterange.py	Wed Feb 06 19:57:52 2008 -0800
@@ -233,7 +233,7 @@
             size = (lb - fb)
             fo = RangeableFileObject(fo, (fb, lb))
         headers = mimetools.Message(StringIO(
-            'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
+            'Content-Type: %s\nContent-Length: %d\nLast-Modified: %s\n' %
             (mtype or 'text/plain', size, modified)))
         return urllib.addinfourl(fo, headers, 'file:'+file)
 
--- a/mercurial/changegroup.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/changegroup.py	Wed Feb 06 19:57:52 2008 -0800
@@ -33,10 +33,9 @@
             break
         yield c
 
-def genchunk(data):
-    """build a changegroup chunk"""
-    header = struct.pack(">l", len(data)+ 4)
-    return "%s%s" % (header, data)
+def chunkheader(length):
+    """build a changegroup chunk header"""
+    return struct.pack(">l", length + 4)
 
 def closechunk():
     return struct.pack(">l", 0)
@@ -81,12 +80,21 @@
         # in case of sshrepo because we don't know the end of the stream
 
         # an empty chunkiter is the end of the changegroup
+        # a changegroup has at least 2 chunkiters (changelog and manifest).
+        # after that, an empty chunkiter is the end of the changegroup
         empty = False
-        while not empty:
+        count = 0
+        while not empty or count <= 2:
             empty = True
+            count += 1
             for chunk in chunkiter(cg):
                 empty = False
-                fh.write(z.compress(genchunk(chunk)))
+                fh.write(z.compress(chunkheader(len(chunk))))
+                pos = 0
+                while pos < len(chunk):
+                    next = pos + 2**20
+                    fh.write(z.compress(chunk[pos:next]))
+                    pos = next
             fh.write(z.compress(closechunk()))
         fh.write(z.flush())
         cleanup = None
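
writebundle now emits the 4-byte length header separately via chunkheader() and compresses the payload in 1 MiB slices rather than building header plus data as one string per chunk, which bounds peak memory on large revisions. A self-contained sketch of that framing, assuming nothing beyond the struct format shown above.

# Chunk framing used above: a big-endian 4-byte length (payload length + 4,
# with 0 marking the end of a group) followed by the payload in 1 MiB slices.
import struct
import zlib

def write_chunk(write, data, slice_size=2 ** 20):
    write(struct.pack(">l", len(data) + 4))      # chunkheader()
    pos = 0
    while pos < len(data):
        write(data[pos:pos + slice_size])
        pos += slice_size

def close_group(write):
    write(struct.pack(">l", 0))                  # closechunk()

# usage: compress as we go, mirroring the z.compress() calls above
z = zlib.compressobj(9)
out = []
write_chunk(lambda s: out.append(z.compress(s)), b"x" * (3 * 2 ** 20))
close_group(lambda s: out.append(z.compress(s)))
out.append(z.flush())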
--- a/mercurial/changelog.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/changelog.py	Wed Feb 06 19:57:52 2008 -0800
@@ -16,16 +16,13 @@
     >>> s
     'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
     >>> res = _string_escape(s)
-    >>> s == _string_unescape(res)
+    >>> s == res.decode('string_escape')
     True
     """
     # subset of the string_escape codec
     text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
     return text.replace('\0', '\\0')
 
-def _string_unescape(text):
-    return text.decode('string_escape')
-
 class appender:
     '''the changelog index must be update last on disk, so we use this class
     to delay writes to it'''
@@ -74,7 +71,7 @@
         return ret
 
     def write(self, s):
-        self.data.append(s)
+        self.data.append(str(s))
         self.offset += len(s)
 
 class changelog(revlog):
@@ -123,10 +120,9 @@
     def decode_extra(self, text):
         extra = {}
         for l in text.split('\0'):
-            if not l:
-                continue
-            k, v = _string_unescape(l).split(':', 1)
-            extra[k] = v
+            if l:
+                k, v = l.decode('string_escape').split(':', 1)
+                extra[k] = v
         return extra
 
     def encode_extra(self, d):
@@ -136,7 +132,7 @@
         items = [_string_escape('%s:%s' % (k, d[k])) for k in keys]
         return "\0".join(items)
 
-    def extract(self, text):
+    def read(self, node):
         """
         format used:
         nodeid\n        : manifest node in ascii
@@ -149,6 +145,7 @@
 
         changelog v0 doesn't use extra
         """
+        text = self.revision(node)
         if not text:
             return (nullid, "", (0, 0), [], "", {'branch': 'default'})
         last = text.index("\n\n")
@@ -175,9 +172,6 @@
         files = l[3:]
         return (manifest, user, (time, timezone), files, desc, extra)
 
-    def read(self, node):
-        return self.extract(self.revision(node))
-
     def add(self, manifest, list, desc, transaction, p1=None, p2=None,
                   user=None, date=None, extra={}):
 
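
_string_unescape disappears in favour of calling the Python 2 string_escape codec directly, as the updated doctest shows. The round trip it relies on, spelled out with a small test string of our own (Python 2 only, since str.decode('string_escape') is used):

# _string_escape as defined above, round-tripped through the codec.
def _string_escape(text):
    # subset of the string_escape codec
    text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
    return text.replace('\0', '\\0')

s = 'ab\ncd\\ef\0gh\r'
escaped = _string_escape(s)
assert s == escaped.decode('string_escape')
assert '\0' not in escaped and '\n' not in escaped   # safe to embed in extras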
--- a/mercurial/cmdutil.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/cmdutil.py	Wed Feb 06 19:57:52 2008 -0800
@@ -7,9 +7,8 @@
 
 from node import *
 from i18n import _
-import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex
-import mdiff, bdiff, util, templater, patch, commands, hg, lock, time
-import fancyopts, revlog, version, extensions, hook
+import os, sys, bisect, stat
+import mdiff, bdiff, util, templater, templatefilters, patch, errno
 
 revrangesep = ':'
 
@@ -17,130 +16,8 @@
     """Exception raised if command is not in the command table."""
 class AmbiguousCommand(Exception):
     """Exception raised if command shortcut matches more than one command."""
-class ParseError(Exception):
-    """Exception raised on errors in parsing the command line."""
 
-def runcatch(ui, args, argv0=None):
-    def catchterm(*args):
-        raise util.SignalInterrupt
-
-    for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
-        num = getattr(signal, name, None)
-        if num: signal.signal(num, catchterm)
-
-    try:
-        try:
-            # enter the debugger before command execution
-            if '--debugger' in args:
-                pdb.set_trace()
-            try:
-                return dispatch(ui, args, argv0=argv0)
-            finally:
-                ui.flush()
-        except:
-            # enter the debugger when we hit an exception
-            if '--debugger' in args:
-                pdb.post_mortem(sys.exc_info()[2])
-            ui.print_exc()
-            raise
-
-    except ParseError, inst:
-        if inst.args[0]:
-            ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
-            commands.help_(ui, inst.args[0])
-        else:
-            ui.warn(_("hg: %s\n") % inst.args[1])
-            commands.help_(ui, 'shortlist')
-    except AmbiguousCommand, inst:
-        ui.warn(_("hg: command '%s' is ambiguous:\n    %s\n") %
-                (inst.args[0], " ".join(inst.args[1])))
-    except UnknownCommand, inst:
-        ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
-        commands.help_(ui, 'shortlist')
-    except hg.RepoError, inst:
-        ui.warn(_("abort: %s!\n") % inst)
-    except lock.LockHeld, inst:
-        if inst.errno == errno.ETIMEDOUT:
-            reason = _('timed out waiting for lock held by %s') % inst.locker
-        else:
-            reason = _('lock held by %s') % inst.locker
-        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
-    except lock.LockUnavailable, inst:
-        ui.warn(_("abort: could not lock %s: %s\n") %
-               (inst.desc or inst.filename, inst.strerror))
-    except revlog.RevlogError, inst:
-        ui.warn(_("abort: %s!\n") % inst)
-    except util.SignalInterrupt:
-        ui.warn(_("killed!\n"))
-    except KeyboardInterrupt:
-        try:
-            ui.warn(_("interrupted!\n"))
-        except IOError, inst:
-            if inst.errno == errno.EPIPE:
-                if ui.debugflag:
-                    ui.warn(_("\nbroken pipe\n"))
-            else:
-                raise
-    except socket.error, inst:
-        ui.warn(_("abort: %s\n") % inst[1])
-    except IOError, inst:
-        if hasattr(inst, "code"):
-            ui.warn(_("abort: %s\n") % inst)
-        elif hasattr(inst, "reason"):
-            try: # usually it is in the form (errno, strerror)
-                reason = inst.reason.args[1]
-            except: # it might be anything, for example a string
-                reason = inst.reason
-            ui.warn(_("abort: error: %s\n") % reason)
-        elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
-            if ui.debugflag:
-                ui.warn(_("broken pipe\n"))
-        elif getattr(inst, "strerror", None):
-            if getattr(inst, "filename", None):
-                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
-            else:
-                ui.warn(_("abort: %s\n") % inst.strerror)
-        else:
-            raise
-    except OSError, inst:
-        if getattr(inst, "filename", None):
-            ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
-        else:
-            ui.warn(_("abort: %s\n") % inst.strerror)
-    except util.UnexpectedOutput, inst:
-        ui.warn(_("abort: %s") % inst[0])
-        if not isinstance(inst[1], basestring):
-            ui.warn(" %r\n" % (inst[1],))
-        elif not inst[1]:
-            ui.warn(_(" empty string\n"))
-        else:
-            ui.warn("\n%r\n" % util.ellipsis(inst[1]))
-    except ImportError, inst:
-        m = str(inst).split()[-1]
-        ui.warn(_("abort: could not import module %s!\n" % m))
-        if m in "mpatch bdiff".split():
-            ui.warn(_("(did you forget to compile extensions?)\n"))
-        elif m in "zlib".split():
-            ui.warn(_("(is your Python install correct?)\n"))
-
-    except util.Abort, inst:
-        ui.warn(_("abort: %s\n") % inst)
-    except SystemExit, inst:
-        # Commands shouldn't sys.exit directly, but give a return code.
-        # Just in case catch this and and pass exit code to caller.
-        return inst.code
-    except:
-        ui.warn(_("** unknown exception encountered, details follow\n"))
-        ui.warn(_("** report bug details to "
-                 "http://www.selenic.com/mercurial/bts\n"))
-        ui.warn(_("** or mercurial@selenic.com\n"))
-        ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
-               % version.get_version())
-        raise
-
-    return -1
-
-def findpossible(ui, cmd):
+def findpossible(ui, cmd, table):
     """
     Return cmd -> (aliases, command table entry)
     for each matching command.
@@ -148,7 +25,7 @@
     """
     choice = {}
     debugchoice = {}
-    for e in commands.table.keys():
+    for e in table.keys():
         aliases = e.lstrip("^").split("|")
         found = None
         if cmd in aliases:
@@ -160,20 +37,20 @@
                     break
         if found is not None:
             if aliases[0].startswith("debug") or found.startswith("debug"):
-                debugchoice[found] = (aliases, commands.table[e])
+                debugchoice[found] = (aliases, table[e])
             else:
-                choice[found] = (aliases, commands.table[e])
+                choice[found] = (aliases, table[e])
 
     if not choice and debugchoice:
         choice = debugchoice
 
     return choice
 
-def findcmd(ui, cmd):
+def findcmd(ui, cmd, table):
     """Return (aliases, command table entry) for command string."""
-    choice = findpossible(ui, cmd)
+    choice = findpossible(ui, cmd, table)
 
-    if choice.has_key(cmd):
+    if cmd in choice:
         return choice[cmd]
 
     if len(choice) > 1:
@@ -186,251 +63,9 @@
 
     raise UnknownCommand(cmd)
 
-def findrepo():
-    p = os.getcwd()
-    while not os.path.isdir(os.path.join(p, ".hg")):
-        oldp, p = p, os.path.dirname(p)
-        if p == oldp:
-            return None
-
-    return p
-
-def parse(ui, args):
-    options = {}
-    cmdoptions = {}
-
-    try:
-        args = fancyopts.fancyopts(args, commands.globalopts, options)
-    except fancyopts.getopt.GetoptError, inst:
-        raise ParseError(None, inst)
-
-    if args:
-        cmd, args = args[0], args[1:]
-        aliases, i = findcmd(ui, cmd)
-        cmd = aliases[0]
-        defaults = ui.config("defaults", cmd)
-        if defaults:
-            args = shlex.split(defaults) + args
-        c = list(i[1])
-    else:
-        cmd = None
-        c = []
-
-    # combine global options into local
-    for o in commands.globalopts:
-        c.append((o[0], o[1], options[o[1]], o[3]))
-
-    try:
-        args = fancyopts.fancyopts(args, c, cmdoptions)
-    except fancyopts.getopt.GetoptError, inst:
-        raise ParseError(cmd, inst)
-
-    # separate global options back out
-    for o in commands.globalopts:
-        n = o[1]
-        options[n] = cmdoptions[n]
-        del cmdoptions[n]
-
-    return (cmd, cmd and i[0] or None, args, options, cmdoptions)
-
-def parseconfig(config):
-    """parse the --config options from the command line"""
-    parsed = []
-    for cfg in config:
-        try:
-            name, value = cfg.split('=', 1)
-            section, name = name.split('.', 1)
-            if not section or not name:
-                raise IndexError
-            parsed.append((section, name, value))
-        except (IndexError, ValueError):
-            raise util.Abort(_('malformed --config option: %s') % cfg)
-    return parsed
-
-def earlygetopt(aliases, args):
-    """Return list of values for an option (or aliases).
-
-    The values are listed in the order they appear in args.
-    The options and values are removed from args.
-    """
-    try:
-        argcount = args.index("--")
-    except ValueError:
-        argcount = len(args)
-    shortopts = [opt for opt in aliases if len(opt) == 2]
-    values = []
-    pos = 0
-    while pos < argcount:
-        if args[pos] in aliases:
-            if pos + 1 >= argcount:
-                # ignore and let getopt report an error if there is no value
-                break
-            del args[pos]
-            values.append(args.pop(pos))
-            argcount -= 2
-        elif args[pos][:2] in shortopts:
-            # short option can have no following space, e.g. hg log -Rfoo
-            values.append(args.pop(pos)[2:])
-            argcount -= 1
-        else:
-            pos += 1
-    return values
-
-def dispatch(ui, args, argv0=None):
-    # remember how to call 'hg' before changing the working dir
-    util.set_hgexecutable(argv0)
-
-    # read --config before doing anything else
-    # (e.g. to change trust settings for reading .hg/hgrc)
-    config = earlygetopt(['--config'], args)
-    if config:
-        ui.updateopts(config=parseconfig(config))
-
-    # check for cwd
-    cwd = earlygetopt(['--cwd'], args)
-    if cwd:
-        os.chdir(cwd[-1])
-
-    # read the local repository .hgrc into a local ui object
-    path = findrepo() or ""
-    if not path:
-        lui = ui
-    if path:
-        try:
-            lui = commands.ui.ui(parentui=ui)
-            lui.readconfig(os.path.join(path, ".hg", "hgrc"))
-        except IOError:
-            pass
-
-    # now we can expand paths, even ones in .hg/hgrc
-    rpath = earlygetopt(["-R", "--repository", "--repo"], args)
-    if rpath:
-        path = lui.expandpath(rpath[-1])
-        lui = commands.ui.ui(parentui=ui)
-        lui.readconfig(os.path.join(path, ".hg", "hgrc"))
-
-    extensions.loadall(lui)
-    # check for fallback encoding
-    fallback = lui.config('ui', 'fallbackencoding')
-    if fallback:
-        util._fallbackencoding = fallback
-
-    fullargs = args
-    cmd, func, args, options, cmdoptions = parse(lui, args)
-
-    if options["config"]:
-        raise util.Abort(_("Option --config may not be abbreviated!"))
-    if options["cwd"]:
-        raise util.Abort(_("Option --cwd may not be abbreviated!"))
-    if options["repository"]:
-        raise util.Abort(_(
-            "Option -R has to be separated from other options (i.e. not -qR) "
-            "and --repository may only be abbreviated as --repo!"))
-
-    if options["encoding"]:
-        util._encoding = options["encoding"]
-    if options["encodingmode"]:
-        util._encodingmode = options["encodingmode"]
-    if options["time"]:
-        def get_times():
-            t = os.times()
-            if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
-                t = (t[0], t[1], t[2], t[3], time.clock())
-            return t
-        s = get_times()
-        def print_time():
-            t = get_times()
-            ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
-                (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
-        atexit.register(print_time)
-
-    ui.updateopts(options["verbose"], options["debug"], options["quiet"],
-                 not options["noninteractive"], options["traceback"])
-
-    if options['help']:
-        return commands.help_(ui, cmd, options['version'])
-    elif options['version']:
-        return commands.version_(ui)
-    elif not cmd:
-        return commands.help_(ui, 'shortlist')
-
-    repo = None
-    if cmd not in commands.norepo.split():
-        try:
-            repo = hg.repository(ui, path=path)
-            ui = repo.ui
-            if not repo.local():
-                raise util.Abort(_("repository '%s' is not local") % path)
-        except hg.RepoError:
-            if cmd not in commands.optionalrepo.split():
-                if not path:
-                    raise hg.RepoError(_("There is no Mercurial repository here"
-                                         " (.hg not found)"))
-                raise
-        d = lambda: func(ui, repo, *args, **cmdoptions)
-    else:
-        d = lambda: func(ui, *args, **cmdoptions)
-
-    # run pre-hook, and abort if it fails
-    ret = hook.hook(ui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
-    if ret:
-        return ret
-    ret = runcommand(ui, options, cmd, d)
-    # run post-hook, passing command result
-    hook.hook(ui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
-              result = ret)
-    return ret
-
-def runcommand(ui, options, cmd, cmdfunc):
-    def checkargs():
-        try:
-            return cmdfunc()
-        except TypeError, inst:
-            # was this an argument error?
-            tb = traceback.extract_tb(sys.exc_info()[2])
-            if len(tb) != 2: # no
-                raise
-            raise ParseError(cmd, _("invalid arguments"))
-
-    if options['profile']:
-        import hotshot, hotshot.stats
-        prof = hotshot.Profile("hg.prof")
-        try:
-            try:
-                return prof.runcall(checkargs)
-            except:
-                try:
-                    ui.warn(_('exception raised - generating '
-                             'profile anyway\n'))
-                except:
-                    pass
-                raise
-        finally:
-            prof.close()
-            stats = hotshot.stats.load("hg.prof")
-            stats.strip_dirs()
-            stats.sort_stats('time', 'calls')
-            stats.print_stats(40)
-    elif options['lsprof']:
-        try:
-            from mercurial import lsprof
-        except ImportError:
-            raise util.Abort(_(
-                'lsprof not available - install from '
-                'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
-        p = lsprof.Profiler()
-        p.enable(subcalls=True)
-        try:
-            return checkargs()
-        finally:
-            p.disable()
-            stats = lsprof.Stats(p.getstats())
-            stats.sort()
-            stats.pprint(top=10, file=sys.stderr, climit=5)
-    else:
-        return checkargs()
-
 def bail_if_changed(repo):
+    if repo.dirstate.parents()[1] != nullid:
+        raise util.Abort(_('outstanding uncommitted merge'))
     modified, added, removed, deleted = repo.status()[:4]
     if modified or added or removed or deleted:
         raise util.Abort(_("outstanding uncommitted changes"))
@@ -461,15 +96,6 @@
     if opts.get('remotecmd'):
         ui.setconfig("ui", "remotecmd", opts['remotecmd'])
 
-def parseurl(url, revs):
-    '''parse url#branch, returning url, branch + revs'''
-
-    if '#' not in url:
-        return url, (revs or None)
-
-    url, rev = url.split('#', 1)
-    return url, revs + [rev]
-
 def revpair(repo, revs):
     '''return pair of nodes, given list of revisions. second item can
     be None, meaning use working dir.'''
@@ -642,14 +268,15 @@
             mapping[abs] = rel, exact
             if repo.ui.verbose or not exact:
                 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
-        if repo.dirstate[abs] != 'r' and not util.lexists(target):
+        if repo.dirstate[abs] != 'r' and (not util.lexists(target)
+            or (os.path.isdir(target) and not os.path.islink(target))):
             remove.append(abs)
             mapping[abs] = rel, exact
             if repo.ui.verbose or not exact:
                 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
     if not dry_run:
+        repo.remove(remove)
         repo.add(add)
-        repo.remove(remove)
     if similarity > 0:
         for old, new, score in findrenames(repo, add, remove, similarity):
             oldrel, oldexact = mapping[old]
@@ -661,6 +288,206 @@
             if not dry_run:
                 repo.copy(old, new)
 
+def copy(ui, repo, pats, opts, rename=False):
+    # called with the repo lock held
+    #
+    # hgsep => pathname that uses "/" to separate directories
+    # ossep => pathname that uses os.sep to separate directories
+    cwd = repo.getcwd()
+    targets = {}
+    after = opts.get("after")
+    dryrun = opts.get("dry_run")
+
+    def walkpat(pat):
+        srcs = []
+        for tag, abs, rel, exact in walk(repo, [pat], opts, globbed=True):
+            state = repo.dirstate[abs]
+            if state in '?r':
+                if exact and state == '?':
+                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
+                if exact and state == 'r':
+                    ui.warn(_('%s: not copying - file has been marked for'
+                              ' remove\n') % rel)
+                continue
+            # abs: hgsep
+            # rel: ossep
+            srcs.append((abs, rel, exact))
+        return srcs
+
+    # abssrc: hgsep
+    # relsrc: ossep
+    # otarget: ossep
+    def copyfile(abssrc, relsrc, otarget, exact):
+        abstarget = util.canonpath(repo.root, cwd, otarget)
+        reltarget = repo.pathto(abstarget, cwd)
+        target = repo.wjoin(abstarget)
+        src = repo.wjoin(abssrc)
+        state = repo.dirstate[abstarget]
+
+        # check for collisions
+        prevsrc = targets.get(abstarget)
+        if prevsrc is not None:
+            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
+                    (reltarget, repo.pathto(abssrc, cwd),
+                     repo.pathto(prevsrc, cwd)))
+            return
+
+        # check for overwrites
+        exists = os.path.exists(target)
+        if (not after and exists or after and state in 'mn'):
+            if not opts['force']:
+                ui.warn(_('%s: not overwriting - file exists\n') %
+                        reltarget)
+                return
+
+        if after:
+            if not exists:
+                return
+        elif not dryrun:
+            try:
+                if exists:
+                    os.unlink(target)
+                targetdir = os.path.dirname(target) or '.'
+                if not os.path.isdir(targetdir):
+                    os.makedirs(targetdir)
+                util.copyfile(src, target)
+            except IOError, inst:
+                if inst.errno == errno.ENOENT:
+                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
+                else:
+                    ui.warn(_('%s: cannot copy - %s\n') %
+                            (relsrc, inst.strerror))
+                    return True # report a failure
+
+        if ui.verbose or not exact:
+            action = rename and "moving" or "copying"
+            ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))
+
+        targets[abstarget] = abssrc
+
+        # fix up dirstate
+        origsrc = repo.dirstate.copied(abssrc) or abssrc
+        if abstarget == origsrc: # copying back a copy?
+            if state not in 'mn' and not dryrun:
+                repo.dirstate.normallookup(abstarget)
+        else:
+            if repo.dirstate[origsrc] == 'a':
+                if not ui.quiet:
+                    ui.warn(_("%s has not been committed yet, so no copy "
+                              "data will be stored for %s.\n")
+                            % (repo.pathto(origsrc, cwd), reltarget))
+                if abstarget not in repo.dirstate and not dryrun:
+                    repo.add([abstarget])
+            elif not dryrun:
+                repo.copy(origsrc, abstarget)
+
+        if rename and not dryrun:
+            repo.remove([abssrc], True)
+
+    # pat: ossep
+    # dest: ossep
+    # srcs: list of (hgsep, hgsep, ossep, bool)
+    # return: function that takes hgsep and returns ossep
+    def targetpathfn(pat, dest, srcs):
+        if os.path.isdir(pat):
+            abspfx = util.canonpath(repo.root, cwd, pat)
+            abspfx = util.localpath(abspfx)
+            if destdirexists:
+                striplen = len(os.path.split(abspfx)[0])
+            else:
+                striplen = len(abspfx)
+            if striplen:
+                striplen += len(os.sep)
+            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
+        elif destdirexists:
+            res = lambda p: os.path.join(dest,
+                                         os.path.basename(util.localpath(p)))
+        else:
+            res = lambda p: dest
+        return res
+
+    # pat: ossep
+    # dest: ossep
+    # srcs: list of (hgsep, hgsep, ossep, bool)
+    # return: function that takes hgsep and returns ossep
+    def targetpathafterfn(pat, dest, srcs):
+        if util.patkind(pat, None)[0]:
+            # a mercurial pattern
+            res = lambda p: os.path.join(dest,
+                                         os.path.basename(util.localpath(p)))
+        else:
+            abspfx = util.canonpath(repo.root, cwd, pat)
+            if len(abspfx) < len(srcs[0][0]):
+                # A directory. Either the target path contains the last
+                # component of the source path or it does not.
+                def evalpath(striplen):
+                    score = 0
+                    for s in srcs:
+                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
+                        if os.path.exists(t):
+                            score += 1
+                    return score
+
+                abspfx = util.localpath(abspfx)
+                striplen = len(abspfx)
+                if striplen:
+                    striplen += len(os.sep)
+                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
+                    score = evalpath(striplen)
+                    striplen1 = len(os.path.split(abspfx)[0])
+                    if striplen1:
+                        striplen1 += len(os.sep)
+                    if evalpath(striplen1) > score:
+                        striplen = striplen1
+                res = lambda p: os.path.join(dest,
+                                             util.localpath(p)[striplen:])
+            else:
+                # a file
+                if destdirexists:
+                    res = lambda p: os.path.join(dest,
+                                        os.path.basename(util.localpath(p)))
+                else:
+                    res = lambda p: dest
+        return res
+
+
+    pats = util.expand_glob(pats)
+    if not pats:
+        raise util.Abort(_('no source or destination specified'))
+    if len(pats) == 1:
+        raise util.Abort(_('no destination specified'))
+    dest = pats.pop()
+    destdirexists = os.path.isdir(dest)
+    if not destdirexists:
+        if len(pats) > 1 or util.patkind(pats[0], None)[0]:
+            raise util.Abort(_('with multiple sources, destination must be an '
+                               'existing directory'))
+        if util.endswithsep(dest):
+            raise util.Abort(_('destination %s is not a directory') % dest)
+
+    tfn = targetpathfn
+    if after:
+        tfn = targetpathafterfn
+    copylist = []
+    for pat in pats:
+        srcs = walkpat(pat)
+        if not srcs:
+            continue
+        copylist.append((tfn(pat, dest, srcs), srcs))
+    if not copylist:
+        raise util.Abort(_('no files to copy'))
+
+    errors = 0
+    for targetpath, srcs in copylist:
+        for abssrc, relsrc, exact in srcs:
+            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
+                errors += 1
+
+    if errors:
+        ui.warn(_('(consider using --after)\n'))
+
+    return errors
+
 def service(opts, parentfn=None, initfn=None, runfn=None):
     '''Run a command as a service.'''
 
@@ -668,6 +495,15 @@
         rfd, wfd = os.pipe()
         args = sys.argv[:]
         args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
+        # Don't pass --cwd to the child process, because we've already
+        # changed directory.
+        for i in xrange(1,len(args)):
+            if args[i].startswith('--cwd='):
+                del args[i]
+                break
+            elif args[i].startswith('--cwd'):
+                del args[i:i+2]
+                break
         pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                          args[0], args)
         os.close(wfd)
@@ -837,7 +673,7 @@
 
     def __init__(self, ui, repo, patch, mapfile, buffered):
         changeset_printer.__init__(self, ui, repo, patch, buffered)
-        filters = templater.common_filters.copy()
+        filters = templatefilters.filters.copy()
         filters['formatnode'] = (ui.debugflag and (lambda x: x)
                                  or (lambda x: x[:12]))
         self.t = templater.templater(mapfile, filters,
@@ -947,25 +783,25 @@
             c = [{'name': x[0], 'source': x[1]} for x in copies]
             return showlist('file_copy', c, plural='file_copies', **args)
 
-        if self.ui.debugflag:
-            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
-            def showfiles(**args):
-                return showlist('file', files[0], **args)
-            def showadds(**args):
-                return showlist('file_add', files[1], **args)
-            def showdels(**args):
-                return showlist('file_del', files[2], **args)
-            def showmanifest(**args):
-                args = args.copy()
-                args.update(dict(rev=self.repo.manifest.rev(changes[0]),
-                                 node=hex(changes[0])))
-                return self.t('manifest', **args)
-        else:
-            def showfiles(**args):
-                return showlist('file', changes[3], **args)
-            showadds = ''
-            showdels = ''
-            showmanifest = ''
+        files = []
+        def getfiles():
+            if not files:
+                files[:] = self.repo.status(
+                    log.parents(changenode)[0], changenode)[:3]
+            return files
+        def showfiles(**args):
+            return showlist('file', changes[3], **args)
+        def showmods(**args):
+            return showlist('file_mod', getfiles()[0], **args)
+        def showadds(**args):
+            return showlist('file_add', getfiles()[1], **args)
+        def showdels(**args):
+            return showlist('file_del', getfiles()[2], **args)
+        def showmanifest(**args):
+            args = args.copy()
+            args.update(dict(rev=self.repo.manifest.rev(changes[0]),
+                             node=hex(changes[0])))
+            return self.t('manifest', **args)
 
         defprops = {
             'author': changes[1],
@@ -974,6 +810,7 @@
             'desc': changes[4].strip(),
             'file_adds': showadds,
             'file_dels': showdels,
+            'file_mods': showmods,
             'files': showfiles,
             'file_copies': showcopies,
             'manifest': showmanifest,
@@ -1065,7 +902,7 @@
 
 def finddate(ui, repo, date):
     """Find the tipmost changeset that matches the given date spec"""
-    df = util.matchdate(date + " to " + date)
+    df = util.matchdate(date)
     get = util.cachefunc(lambda r: repo.changectx(r).changeset())
     changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
     results = {}
@@ -1275,3 +1112,48 @@
             for rev in nrevs:
                 yield 'iter', rev, None
     return iterate(), matchfn
+
+def commit(ui, repo, commitfunc, pats, opts):
+    '''commit the specified files or all outstanding changes'''
+    message = logmessage(opts)
+
+    # extract addremove carefully -- this function can be called from a command
+    # that doesn't support addremove
+    if opts.get('addremove'):
+        addremove(repo, pats, opts)
+
+    fns, match, anypats = matchpats(repo, pats, opts)
+    if pats:
+        status = repo.status(files=fns, match=match)
+        modified, added, removed, deleted, unknown = status[:5]
+        files = modified + added + removed
+        slist = None
+        for f in fns:
+            if f == '.':
+                continue
+            if f not in files:
+                rf = repo.wjoin(f)
+                try:
+                    mode = os.lstat(rf)[stat.ST_MODE]
+                except OSError:
+                    raise util.Abort(_("file %s not found!") % rf)
+                if stat.S_ISDIR(mode):
+                    name = f + '/'
+                    if slist is None:
+                        slist = list(files)
+                        slist.sort()
+                    i = bisect.bisect(slist, name)
+                    if i >= len(slist) or not slist[i].startswith(name):
+                        raise util.Abort(_("no match under directory %s!")
+                                         % rf)
+                elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
+                    raise util.Abort(_("can't commit %s: "
+                                       "unsupported file type!") % rf)
+                elif f not in repo.dirstate:
+                    raise util.Abort(_("file %s not tracked!") % rf)
+    else:
+        files = []
+    try:
+        return commitfunc(ui, repo, files, message, match, opts)
+    except ValueError, inst:
+        raise util.Abort(str(inst))
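
The relocated commit() helper validates explicitly named paths: a named directory is accepted only if at least one changed file sorts under it, which it checks by bisecting into the sorted list of changed files rather than scanning it. The prefix test in isolation, with sample data.

# The bisect-based directory check used by commit() above: 'name/' matches
# iff some entry in the sorted file list starts with it.
import bisect

def dir_has_match(files, directory):
    name = directory.rstrip('/') + '/'
    slist = sorted(files)
    i = bisect.bisect(slist, name)
    return i < len(slist) and slist[i].startswith(name)

changed = ['docs/readme.txt', 'src/a.py', 'src/lib/b.py']
print(dir_has_match(changed, 'src'))      # True
print(dir_has_match(changed, 'tests'))    # False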
--- a/mercurial/commands.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/commands.py	Wed Feb 06 19:57:52 2008 -0800
@@ -5,14 +5,13 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-import demandimport; demandimport.enable()
 from node import *
 from i18n import _
-import bisect, os, re, sys, urllib, stat
-import ui, hg, util, revlog, bundlerepo, extensions
+import os, re, sys, urllib
+import hg, util, revlog, bundlerepo, extensions
 import difflib, patch, time, help, mdiff, tempfile
 import errno, version, socket
-import archival, changegroup, cmdutil, hgweb.server, sshserver
+import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
 
 # Commands start here, listed alphabetically
 
@@ -27,17 +26,23 @@
     If no names are given, add all files in the repository.
     """
 
+    rejected = None
+    exacts = {}
     names = []
-    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
+    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
+                                             badmatch=util.always):
         if exact:
             if ui.verbose:
                 ui.status(_('adding %s\n') % rel)
             names.append(abs)
+            exacts[abs] = 1
         elif abs not in repo.dirstate:
             ui.status(_('adding %s\n') % rel)
             names.append(abs)
     if not opts.get('dry_run'):
-        repo.add(names)
+        rejected = repo.add(names)
+        rejected = [p for p in rejected if p in exacts]
+    return rejected and 1 or 0
 
 def addremove(ui, repo, *pats, **opts):
     """add all new files, delete all missing files
@@ -191,10 +196,13 @@
         raise util.Abort(_("please specify a revision to backout"))
 
     cmdutil.bail_if_changed(repo)
+    node = repo.lookup(rev)
+
     op1, op2 = repo.dirstate.parents()
-    if op2 != nullid:
-        raise util.Abort(_('outstanding uncommitted merge'))
-    node = repo.lookup(rev)
+    a = repo.changelog.ancestor(op1, node)
+    if a != node:
+        raise util.Abort(_('cannot back out change on a different branch'))
+
     p1, p2 = repo.changelog.parents(node)
     if p1 == nullid:
         raise util.Abort(_('cannot back out a change with no parents'))
@@ -211,6 +219,7 @@
         if opts['parent']:
             raise util.Abort(_('cannot use --parent on non-merge changeset'))
         parent = p1
+
     hg.clean(repo, node, show_stats=False)
     revert_opts = opts.copy()
     revert_opts['date'] = None
@@ -237,6 +246,90 @@
             ui.status(_('(use "backout --merge" '
                         'if you want to auto-merge)\n'))
 
+def bisect(ui, repo, rev=None, extra=None,
+               reset=None, good=None, bad=None, skip=None, noupdate=None):
+    """subdivision search of changesets
+
+    This command helps to find changesets which introduce problems.
+    To use, mark the earliest changeset you know exhibits the problem
+    as bad, then mark the latest changeset which is free from the
+    problem as good. Bisect will update your working directory to a
+    revision for testing. Once you have performed tests, mark the
+    working directory as bad or good and bisect will either update to
+    another candidate changeset or announce that it has found the bad
+    revision.
+    """
+    # backward compatibility
+    if rev in "good bad reset init".split():
+        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
+        cmd, rev, extra = rev, extra, None
+        if cmd == "good":
+            good = True
+        elif cmd == "bad":
+            bad = True
+        else:
+            reset = True
+    elif extra or good + bad + skip + reset > 1:
+        raise util.Abort("Incompatible arguments")
+
+    if reset:
+        p = repo.join("bisect.state")
+        if os.path.exists(p):
+            os.unlink(p)
+        return
+
+    # load state
+    state = {'good': [], 'bad': [], 'skip': []}
+    if os.path.exists(repo.join("bisect.state")):
+        for l in repo.opener("bisect.state"):
+            kind, node = l[:-1].split()
+            node = repo.lookup(node)
+            if kind not in state:
+                raise util.Abort(_("unknown bisect kind %s") % kind)
+            state[kind].append(node)
+
+    # update state
+    node = repo.lookup(rev or '.')
+    if good:
+        state['good'].append(node)
+    elif bad:
+        state['bad'].append(node)
+    elif skip:
+        state['skip'].append(node)
+
+    # save state
+    f = repo.opener("bisect.state", "w", atomictemp=True)
+    wlock = repo.wlock()
+    try:
+        for kind in state:
+            for node in state[kind]:
+                f.write("%s %s\n" % (kind, hg.hex(node)))
+        f.rename()
+    finally:
+        del wlock
+
+    if not state['good'] or not state['bad']:
+        return
+
+    # actually bisect
+    node, changesets, good = hbisect.bisect(repo.changelog, state)
+    if changesets == 0:
+        ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
+        displayer = cmdutil.show_changeset(ui, repo, {})
+        displayer.show(changenode=node)
+    elif node is not None:
+        # compute the approximate number of remaining tests
+        tests, size = 0, 2
+        while size <= changesets:
+            tests, size = tests + 1, size * 2
+        rev = repo.changelog.rev(node)
+        ui.write(_("Testing changeset %s:%s "
+                   "(%s changesets remaining, ~%s tests)\n")
+                 % (rev, hg.short(node), changesets, tests))
+        if not noupdate:
+            cmdutil.bail_if_changed(repo)
+            return hg.clean(repo, node)
+
 def branch(ui, repo, label=None, **opts):
     """set or show the current branch name
 
@@ -246,6 +339,8 @@
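
The new bisect command estimates the remaining work by doubling a window until it covers the candidate changesets, i.e. roughly floor(log2(changesets)) further tests. The estimate extracted as-is, with a few sample values.

# Step estimate used by the bisect command above.
def remaining_tests(changesets):
    tests, size = 0, 2
    while size <= changesets:
        tests, size = tests + 1, size * 2
    return tests

for n in (1, 3, 10, 100, 1000):
    print("%4d changesets -> ~%d tests" % (n, remaining_tests(n)))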
 
     Unless --force is specified, branch will not let you set a
     branch name that shadows an existing branch.
+
+    Use the command 'hg update' to switch to an existing branch.
     """
 
     if label:
@@ -265,6 +360,8 @@
     inactive.  If active is specified, only show active branches.
 
     A branch is considered active if it contains unmerged heads.
+
+    Use the command 'hg update' to switch to an existing branch.
     """
     b = repo.branchtags()
     heads = dict.fromkeys(repo.heads(), 1)
@@ -295,6 +392,7 @@
 
     If no destination repository is specified the destination is assumed
     to have all the nodes specified by one or more --base parameters.
+    To create a bundle containing all changesets, use --base null.
 
     The bundle file can then be transferred using conventional means and
     applied to another repository with the unbundle or pull command.
@@ -336,7 +434,7 @@
                         visit.append(p)
     else:
         cmdutil.setremoteconfig(ui, opts)
-        dest, revs = cmdutil.parseurl(
+        dest, revs, checkout = hg.parseurl(
             ui.expandpath(dest or 'default-push', dest or 'default'), revs)
         other = hg.repository(ui, dest)
         o = repo.findoutgoing(other, force=opts['force'])
@@ -425,244 +523,13 @@
     If a list of files is omitted, all changes reported by "hg status"
     will be committed.
 
-    If no commit message is specified, the editor configured in your hgrc
-    or in the EDITOR environment variable is started to enter a message.
+    If no commit message is specified, the configured editor is started to
+    enter a message.
     """
-    message = cmdutil.logmessage(opts)
-
-    if opts['addremove']:
-        cmdutil.addremove(repo, pats, opts)
-    fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
-    if pats:
-        status = repo.status(files=fns, match=match)
-        modified, added, removed, deleted, unknown = status[:5]
-        files = modified + added + removed
-        slist = None
-        for f in fns:
-            if f == '.':
-                continue
-            if f not in files:
-                rf = repo.wjoin(f)
-                try:
-                    mode = os.lstat(rf)[stat.ST_MODE]
-                except OSError:
-                    raise util.Abort(_("file %s not found!") % rf)
-                if stat.S_ISDIR(mode):
-                    name = f + '/'
-                    if slist is None:
-                        slist = list(files)
-                        slist.sort()
-                    i = bisect.bisect(slist, name)
-                    if i >= len(slist) or not slist[i].startswith(name):
-                        raise util.Abort(_("no match under directory %s!")
-                                         % rf)
-                elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
-                    raise util.Abort(_("can't commit %s: "
-                                       "unsupported file type!") % rf)
-                elif f not in repo.dirstate:
-                    raise util.Abort(_("file %s not tracked!") % rf)
-    else:
-        files = []
-    try:
-        repo.commit(files, message, opts['user'], opts['date'], match,
-                    force_editor=opts.get('force_editor'))
-    except ValueError, inst:
-        raise util.Abort(str(inst))
-
-def docopy(ui, repo, pats, opts):
-    # called with the repo lock held
-    #
-    # hgsep => pathname that uses "/" to separate directories
-    # ossep => pathname that uses os.sep to separate directories
-    cwd = repo.getcwd()
-    errors = 0
-    copied = []
-    targets = {}
-
-    # abs: hgsep
-    # rel: ossep
-    # return: hgsep
-    def okaytocopy(abs, rel, exact):
-        reasons = {'?': _('is not managed'),
-                   'r': _('has been marked for remove')}
-        state = repo.dirstate[abs]
-        reason = reasons.get(state)
-        if reason:
-            if exact:
-                ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
-        else:
-            if state == 'a':
-                origsrc = repo.dirstate.copied(abs)
-                if origsrc is not None:
-                    return origsrc
-            return abs
-
-    # origsrc: hgsep
-    # abssrc: hgsep
-    # relsrc: ossep
-    # otarget: ossep
-    def copy(origsrc, abssrc, relsrc, otarget, exact):
-        abstarget = util.canonpath(repo.root, cwd, otarget)
-        reltarget = repo.pathto(abstarget, cwd)
-        prevsrc = targets.get(abstarget)
-        src = repo.wjoin(abssrc)
-        target = repo.wjoin(abstarget)
-        if prevsrc is not None:
-            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
-                    (reltarget, repo.pathto(abssrc, cwd),
-                     repo.pathto(prevsrc, cwd)))
-            return
-        if (not opts['after'] and os.path.exists(target) or
-            opts['after'] and repo.dirstate[abstarget] in 'mn'):
-            if not opts['force']:
-                ui.warn(_('%s: not overwriting - file exists\n') %
-                        reltarget)
-                return
-            if not opts['after'] and not opts.get('dry_run'):
-                os.unlink(target)
-        if opts['after']:
-            if not os.path.exists(target):
-                return
-        else:
-            targetdir = os.path.dirname(target) or '.'
-            if not os.path.isdir(targetdir) and not opts.get('dry_run'):
-                os.makedirs(targetdir)
-            try:
-                restore = repo.dirstate[abstarget] == 'r'
-                if restore and not opts.get('dry_run'):
-                    repo.undelete([abstarget])
-                try:
-                    if not opts.get('dry_run'):
-                        util.copyfile(src, target)
-                    restore = False
-                finally:
-                    if restore:
-                        repo.remove([abstarget])
-            except IOError, inst:
-                if inst.errno == errno.ENOENT:
-                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
-                else:
-                    ui.warn(_('%s: cannot copy - %s\n') %
-                            (relsrc, inst.strerror))
-                    errors += 1
-                    return
-        if ui.verbose or not exact:
-            ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
-        targets[abstarget] = abssrc
-        if abstarget != origsrc:
-            if repo.dirstate[origsrc] == 'a':
-                if not ui.quiet:
-                    ui.warn(_("%s has not been committed yet, so no copy "
-                              "data will be stored for %s.\n")
-                            % (repo.pathto(origsrc, cwd), reltarget))
-                if abstarget not in repo.dirstate and not opts.get('dry_run'):
-                    repo.add([abstarget])
-            elif not opts.get('dry_run'):
-                repo.copy(origsrc, abstarget)
-        copied.append((abssrc, relsrc, exact))
-
-    # pat: ossep
-    # dest ossep
-    # srcs: list of (hgsep, hgsep, ossep, bool)
-    # return: function that takes hgsep and returns ossep
-    def targetpathfn(pat, dest, srcs):
-        if os.path.isdir(pat):
-            abspfx = util.canonpath(repo.root, cwd, pat)
-            abspfx = util.localpath(abspfx)
-            if destdirexists:
-                striplen = len(os.path.split(abspfx)[0])
-            else:
-                striplen = len(abspfx)
-            if striplen:
-                striplen += len(os.sep)
-            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
-        elif destdirexists:
-            res = lambda p: os.path.join(dest,
-                                         os.path.basename(util.localpath(p)))
-        else:
-            res = lambda p: dest
-        return res
-
-    # pat: ossep
-    # dest ossep
-    # srcs: list of (hgsep, hgsep, ossep, bool)
-    # return: function that takes hgsep and returns ossep
-    def targetpathafterfn(pat, dest, srcs):
-        if util.patkind(pat, None)[0]:
-            # a mercurial pattern
-            res = lambda p: os.path.join(dest,
-                                         os.path.basename(util.localpath(p)))
-        else:
-            abspfx = util.canonpath(repo.root, cwd, pat)
-            if len(abspfx) < len(srcs[0][0]):
-                # A directory. Either the target path contains the last
-                # component of the source path or it does not.
-                def evalpath(striplen):
-                    score = 0
-                    for s in srcs:
-                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
-                        if os.path.exists(t):
-                            score += 1
-                    return score
-
-                abspfx = util.localpath(abspfx)
-                striplen = len(abspfx)
-                if striplen:
-                    striplen += len(os.sep)
-                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
-                    score = evalpath(striplen)
-                    striplen1 = len(os.path.split(abspfx)[0])
-                    if striplen1:
-                        striplen1 += len(os.sep)
-                    if evalpath(striplen1) > score:
-                        striplen = striplen1
-                res = lambda p: os.path.join(dest,
-                                             util.localpath(p)[striplen:])
-            else:
-                # a file
-                if destdirexists:
-                    res = lambda p: os.path.join(dest,
-                                        os.path.basename(util.localpath(p)))
-                else:
-                    res = lambda p: dest
-        return res
-
-
-    pats = util.expand_glob(pats)
-    if not pats:
-        raise util.Abort(_('no source or destination specified'))
-    if len(pats) == 1:
-        raise util.Abort(_('no destination specified'))
-    dest = pats.pop()
-    destdirexists = os.path.isdir(dest)
-    if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
-        raise util.Abort(_('with multiple sources, destination must be an '
-                         'existing directory'))
-    if opts['after']:
-        tfn = targetpathafterfn
-    else:
-        tfn = targetpathfn
-    copylist = []
-    for pat in pats:
-        srcs = []
-        for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
-                                                       globbed=True):
-            origsrc = okaytocopy(abssrc, relsrc, exact)
-            if origsrc:
-                srcs.append((origsrc, abssrc, relsrc, exact))
-        if not srcs:
-            continue
-        copylist.append((tfn(pat, dest, srcs), srcs))
-    if not copylist:
-        raise util.Abort(_('no files to copy'))
-
-    for targetpath, srcs in copylist:
-        for origsrc, abssrc, relsrc, exact in srcs:
-            copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
-
-    if errors:
-        ui.warn(_('(consider using --after)\n'))
-    return errors, copied
+    def commitfunc(ui, repo, files, message, match, opts):
+        return repo.commit(files, message, opts['user'], opts['date'], match,
+                           force_editor=opts.get('force_editor'))
+    cmdutil.commit(ui, repo, commitfunc, pats, opts)
 
 def copy(ui, repo, *pats, **opts):
     """mark files as copied for the next commit
@@ -680,10 +547,9 @@
     """
     wlock = repo.wlock(False)
     try:
-        errs, copied = docopy(ui, repo, pats, opts)
+        return cmdutil.copy(ui, repo, pats, opts)
     finally:
         del wlock
-    return errs
 
 def debugancestor(ui, index, rev1, rev2):
     """find the ancestor revision of two revisions in a given index"""
@@ -698,7 +564,7 @@
         options = []
         otables = [globalopts]
         if cmd:
-            aliases, entry = cmdutil.findcmd(ui, cmd)
+            aliases, entry = cmdutil.findcmd(ui, cmd, table)
             otables.append(entry[1])
         for t in otables:
             for o in t:
@@ -708,10 +574,18 @@
         ui.write("%s\n" % "\n".join(options))
         return
 
-    clist = cmdutil.findpossible(ui, cmd).keys()
+    clist = cmdutil.findpossible(ui, cmd, table).keys()
     clist.sort()
     ui.write("%s\n" % "\n".join(clist))
 
+def debugfsinfo(ui, path = "."):
+    file('.debugfsinfo', 'w').write('')
+    ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
+    ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
+    ui.write('case-sensitive: %s\n' % (util.checkfolding('.debugfsinfo')
+                                and 'yes' or 'no'))
+    os.unlink('.debugfsinfo')
+
 def debugrebuildstate(ui, repo, rev=""):
     """rebuild the dirstate as it would look like for the given revision"""
     if rev == "":
@@ -795,20 +669,21 @@
 
 def debugstate(ui, repo):
     """show the contents of the current dirstate"""
-    dc = repo.dirstate._map
-    k = dc.keys()
+    k = repo.dirstate._map.items()
     k.sort()
-    for file_ in k:
-        if dc[file_][3] == -1:
+    for file_, ent in k:
+        if ent[3] == -1:
             # Pad or slice to locale representation
-            locale_len = len(time.strftime("%x %X", time.localtime(0)))
+            locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(0)))
             timestr = 'unset'
             timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
         else:
-            timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
-        ui.write("%c %3o %10d %s %s\n"
-                 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
-                    timestr, file_))
+            timestr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ent[3]))
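+        # the 020000 bit is present in the stored lstat mode for symlinks
+        # (S_IFLNK) but not for regular files, so it marks 'lnk' entries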
+        if ent[1] & 020000:
+            mode = 'lnk'
+        else:
+            mode = '%3o' % (ent[1] & 0777)
+        ui.write("%c %s %10d %s %s\n" % (ent[0], mode, ent[2], timestr, file_))
     for f in repo.dirstate.copies():
         ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
 
@@ -839,7 +714,10 @@
              " nodeid       p1           p2\n")
     for i in xrange(r.count()):
         node = r.node(i)
-        pp = r.parents(node)
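+        # tolerate corrupt or unreadable entries: fall back to null parents
+        # so the index dump can continue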
+        try:
+            pp = r.parents(node)
+        except:
+            pp = [nullid, nullid]
         ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
             short(node), short(pp[0]), short(pp[1])))
@@ -899,82 +777,45 @@
 
     # patch
     ui.status(_("Checking patch...\n"))
-    patcher = ui.config('ui', 'patch')
-    patcher = ((patcher and util.find_exe(patcher)) or
-               util.find_exe('gpatch') or
-               util.find_exe('patch'))
-    if not patcher:
-        ui.write(_(" Can't find patch or gpatch in PATH\n"))
-        ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
-        problems += 1
+    patchproblems = 0
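+    # self-test: generate a unified diff between two small texts and check
+    # that patch.patch() can apply it, instead of probing PATH for an
+    # external patch binary as the old check did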
+    a = "1\n2\n3\n4\n"
+    b = "1\n2\n3\ninsert\n4\n"
+    fa = writetemp(a)
+    d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
+        os.path.basename(fa))
+    fd = writetemp(d)
+
+    files = {}
+    try:
+        patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
+    except util.Abort, e:
+        ui.write(_(" patch call failed:\n"))
+        ui.write(" " + str(e) + "\n")
+        patchproblems += 1
     else:
-        # actually attempt a patch here
-        a = "1\n2\n3\n4\n"
-        b = "1\n2\n3\ninsert\n4\n"
-        fa = writetemp(a)
-        d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
-        fd = writetemp(d)
-
-        files = {}
-        try:
-            patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
-        except util.Abort, e:
-            ui.write(_(" patch call failed:\n"))
-            ui.write(" " + str(e) + "\n")
-            problems += 1
+        if list(files) != [os.path.basename(fa)]:
+            ui.write(_(" unexpected patch output!\n"))
+            patchproblems += 1
+        a = file(fa).read()
+        if a != b:
+            ui.write(_(" patch test failed!\n"))
+            patchproblems += 1
+
+    if patchproblems:
+        if ui.config('ui', 'patch'):
+            ui.write(_(" (Current patch tool may be incompatible with patch,"
+                       " or misconfigured. Please check your .hgrc file)\n"))
         else:
-            if list(files) != [os.path.basename(fa)]:
-                ui.write(_(" unexpected patch output!"))
-                ui.write(_(" (you may have an incompatible version of patch)\n"))
-                problems += 1
-            a = file(fa).read()
-            if a != b:
-                ui.write(_(" patch test failed!"))
-                ui.write(_(" (you may have an incompatible version of patch)\n"))
-                problems += 1
-
-        os.unlink(fa)
-        os.unlink(fd)
-
-    # merge helper
-    ui.status(_("Checking merge helper...\n"))
-    cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
-           or "hgmerge")
-    cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
-    if not cmdpath:
-        if cmd == 'hgmerge':
-            ui.write(_(" No merge helper set and can't find default"
-                       " hgmerge script in PATH\n"))
-            ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
-        else:
-            ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
-            ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
-            problems += 1
-    else:
-        # actually attempt a patch here
-        fa = writetemp("1\n2\n3\n4\n")
-        fl = writetemp("1\n2\n3\ninsert\n4\n")
-        fr = writetemp("begin\n1\n2\n3\n4\n")
-        r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
-        if r:
-            ui.write(_(" Got unexpected merge error %d!\n") % r)
-            problems += 1
-        m = file(fl).read()
-        if m != "begin\n1\n2\n3\ninsert\n4\n":
-            ui.write(_(" Got unexpected merge results!\n"))
-            ui.write(_(" (your merge helper may have the"
-                       " wrong argument order)\n"))
-            ui.write(_(" Result: %r\n") % m)
-            problems += 1
-        os.unlink(fa)
-        os.unlink(fl)
-        os.unlink(fr)
+            ui.write(_(" Internal patcher failure, please report this error"
+                       " to http://www.selenic.com/mercurial/bts\n"))
+    problems += patchproblems
+
+    os.unlink(fa)
+    os.unlink(fd)
 
     # editor
     ui.status(_("Checking commit editor...\n"))
-    editor = (os.environ.get("HGEDITOR") or
-              ui.config("ui", "editor") or
-              os.environ.get("EDITOR", "vi"))
+    editor = ui.geteditor()
     cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
     if not cmdpath:
         if editor == 'vi':
@@ -1011,7 +852,8 @@
     ctx = repo.changectx(opts.get('rev', 'tip'))
     for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
                                              ctx.node()):
-        m = ctx.filectx(abs).renamed()
+        fctx = ctx.filectx(abs)
+        m = fctx.filelog().renamed(fctx.filenode())
         if m:
             ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
         else:
@@ -1223,9 +1065,9 @@
             for fn in fns:
                 if fn in skip:
                     continue
-                fstate.setdefault(fn, {})
                 try:
                     grepbody(fn, rev, getfile(fn).read(mf[fn]))
+                    fstate.setdefault(fn, [])
                     if follow:
                         copied = getfile(fn).renamed(mf[fn])
                         if copied:
@@ -1342,9 +1184,13 @@
         if with_version:
             version_(ui)
             ui.write('\n')
-        aliases, i = cmdutil.findcmd(ui, name)
+        aliases, i = cmdutil.findcmd(ui, name, table)
         # synopsis
-        ui.write("%s\n\n" % i[2])
+        ui.write("%s\n" % i[2])
+
+        # aliases
+        if not ui.quiet and len(aliases) > 1:
+            ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
 
         # description
         doc = i[0].__doc__
@@ -1352,13 +1198,9 @@
             doc = _("(No help text available)")
         if ui.quiet:
             doc = doc.splitlines(0)[0]
-        ui.write("%s\n" % doc.rstrip())
+        ui.write("\n%s\n" % doc.rstrip())
 
         if not ui.quiet:
-            # aliases
-            if len(aliases) > 1:
-                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
-
             # options
             if i[1]:
                 option_lists.append((_("options:\n"), i[1]))
@@ -1505,12 +1347,16 @@
     name for non-default branches.
     """
 
+    if not repo and not source:
+        raise util.Abort(_("There is no Mercurial repository here "
+                           "(.hg not found)"))
+
     hexfunc = ui.debugflag and hex or short
     default = not (num or id or branch or tags)
     output = []
 
     if source:
-        source, revs = cmdutil.parseurl(ui.expandpath(source), [])
+        source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
         srepo = hg.repository(ui, source)
         if not rev and revs:
             rev = revs[0]
@@ -1603,8 +1449,10 @@
                 data = patch.extract(ui, sys.stdin)
             else:
                 ui.status(_("applying %s\n") % p)
-                data = patch.extract(ui, file(pf, 'rb'))
-
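+                # a patch argument that is not a local file is fetched as a URL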
+                if os.path.exists(pf):
+                    data = patch.extract(ui, file(pf, 'rb'))
+                else:
+                    data = patch.extract(ui, urllib.urlopen(pf))
             tmpname, message, user, date, branch, nodeid, p1, p2 = data
 
             if tmpname is None:
@@ -1650,16 +1498,21 @@
                                        files=files)
                 finally:
                     files = patch.updatedir(ui, repo, files)
-                n = repo.commit(files, message, user, date)
-                if opts.get('exact'):
-                    if hex(n) != nodeid:
-                        repo.rollback()
-                        raise util.Abort(_('patch is damaged' +
-                                           ' or loses information'))
+                if not opts.get('no_commit'):
+                    n = repo.commit(files, message, opts.get('user') or user,
+                                    opts.get('date') or date)
+                    if opts.get('exact'):
+                        if hex(n) != nodeid:
+                            repo.rollback()
+                            raise util.Abort(_('patch is damaged'
+                                               ' or loses information'))
+                    # Force a dirstate write so that the next transaction
+                    # backups an up-do-date file.
+                    repo.dirstate.write()
             finally:
                 os.unlink(tmpname)
     finally:
-        del wlock, lock
+        del lock, wlock
 
 def incoming(ui, repo, source="default", **opts):
     """show new changesets found in source
@@ -1673,17 +1526,13 @@
 
     See pull for valid source format details.
     """
-    source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
+    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
     cmdutil.setremoteconfig(ui, opts)
 
     other = hg.repository(ui, source)
-    ui.status(_('comparing with %s\n') % source)
+    ui.status(_('comparing with %s\n') % util.hidepassword(source))
     if revs:
-        if 'lookup' in other.capabilities:
-            revs = [other.lookup(rev) for rev in revs]
-        else:
-            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
-            raise util.Abort(error)
+        revs = [other.lookup(rev) for rev in revs]
     incoming = repo.findincoming(other, heads=revs, force=opts["force"])
     if not incoming:
         try:
@@ -1701,8 +1550,6 @@
             if revs is None:
                 cg = other.changegroup(incoming, "incoming")
             else:
-                if 'changegroupsubset' not in other.capabilities:
-                    raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
                 cg = other.changegroupsubset(incoming, revs, 'incoming')
             bundletype = other.local() and "HG10BZ" or "HG10UN"
             fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
@@ -1831,8 +1678,7 @@
         endrev = repo.changelog.count()
     rcache = {}
     ncache = {}
-    dcache = []
-    def getrenamed(fn, rev, man):
+    def getrenamed(fn, rev):
         '''looks up all renames for a file (up to endrev) the first
         time the file is given. It indexes on the changerev and only
         parses the manifest if linkrev != changerev.
@@ -1852,13 +1698,14 @@
                     break
         if rev in rcache[fn]:
             return rcache[fn][rev]
-        mr = repo.manifest.rev(man)
-        if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
-            return ncache[fn].get(repo.manifest.find(man, fn)[0])
-        if not dcache or dcache[0] != man:
-            dcache[:] = [man, repo.manifest.readdelta(man)]
-        if fn in dcache[1]:
-            return ncache[fn].get(dcache[1][fn])
+
+        # If linkrev != rev (i.e. rev not found in rcache) fall back to
+        # filectx logic.
+
+        try:
+            return repo.changectx(rev).filectx(fn).renamed()
+        except revlog.LookupError:
+            pass
         return None
 
     df = False
@@ -1895,9 +1742,8 @@
 
             copies = []
             if opts.get('copies') and rev:
-                mf = get(rev)[0]
                 for fn in get(rev)[3]:
-                    rename = getrenamed(fn, rev, mf)
+                    rename = getrenamed(fn, rev)
                     if rename:
                         copies.append((fn, rename[0]))
             displayer.show(rev, changenode, copies=copies)
@@ -1906,7 +1752,7 @@
             if displayer.flush(rev):
                 count += 1
 
-def manifest(ui, repo, rev=None):
+def manifest(ui, repo, node=None, rev=None):
     """output the current or given revision of the project manifest
 
     Print a list of version controlled files for the given revision.
@@ -1916,11 +1762,17 @@
     The manifest is the list of files being version controlled. If no revision
     is given then the first parent of the working directory is used.
 
-    With -v flag, print file permissions. With --debug flag, print
-    file revision hashes.
+    With -v flag, print file permissions, symlink and executable bits. With
+    --debug flag, print file revision hashes.
     """
 
-    m = repo.changectx(rev).manifest()
+    if rev and node:
+        raise util.Abort(_("please specify just one revision"))
+
+    if not node:
+        node = rev
+
+    m = repo.changectx(node).manifest()
     files = m.keys()
     files.sort()
 
@@ -1928,7 +1780,9 @@
         if ui.debugflag:
             ui.write("%40s " % hex(m[f]))
         if ui.verbose:
-            ui.write("%3s " % (m.execf(f) and "755" or "644"))
+            type = m.execf(f) and "*" or m.linkf(f) and "@" or " "
+            perm = m.execf(f) and "755" or "644"
+            ui.write("%3s %1s " % (perm, type))
         ui.write("%s\n" % f)
 
 def merge(ui, repo, node=None, force=None, rev=None):
@@ -1947,7 +1801,6 @@
 
     if rev and node:
         raise util.Abort(_("please specify just one revision"))
-
     if not node:
         node = rev
 
@@ -1957,10 +1810,13 @@
             raise util.Abort(_('repo has %d heads - '
                                'please merge with an explicit rev') %
                              len(heads))
+        parent = repo.dirstate.parents()[0]
         if len(heads) == 1:
-            raise util.Abort(_('there is nothing to merge - '
-                               'use "hg update" instead'))
-        parent = repo.dirstate.parents()[0]
+            msg = _('there is nothing to merge')
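+            # only suggest "hg update" when the working dir parent is not
+            # already the tip of its branch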
+            if parent != repo.lookup(repo.workingctx().branch()):
+                msg = _('%s - use "hg update" instead') % msg
+            raise util.Abort(msg)
+
         if parent not in heads:
             raise util.Abort(_('working dir not at a head rev - '
                                'use "hg update" or merge with an explicit rev'))
@@ -1976,14 +1832,14 @@
 
     See pull for valid destination format details.
     """
-    dest, revs = cmdutil.parseurl(
+    dest, revs, checkout = hg.parseurl(
         ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
     cmdutil.setremoteconfig(ui, opts)
     if revs:
         revs = [repo.lookup(rev) for rev in revs]
 
     other = hg.repository(ui, dest)
-    ui.status(_('comparing with %s\n') % dest)
+    ui.status(_('comparing with %s\n') % util.hidepassword(dest))
     o = repo.findoutgoing(other, force=opts['force'])
     if not o:
         ui.status(_("no changes found\n"))
@@ -2008,16 +1864,30 @@
     revision or the argument to --rev if given) is printed.
     """
     rev = opts.get('rev')
+    if rev:
+        ctx = repo.changectx(rev)
+    else:
+        ctx = repo.workingctx()
+
     if file_:
         files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
         if anypats or len(files) != 1:
             raise util.Abort(_('can only specify an explicit file name'))
-        ctx = repo.filectx(files[0], changeid=rev)
-    elif rev:
-        ctx = repo.changectx(rev)
+        file_ = files[0]
+        filenodes = []
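+        # collect the file's node in each parent that actually contains it,
+        # then map those filelog nodes back to changesets via linkrev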
+        for cp in ctx.parents():
+            if not cp:
+                continue
+            try:
+                filenodes.append(cp.filenode(file_))
+            except revlog.LookupError:
+                pass
+        if not filenodes:
+            raise util.Abort(_("'%s' not found in manifest!") % file_)
+        fl = repo.file(file_)
+        p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
     else:
-        ctx = repo.workingctx()
-    p = [cp.node() for cp in ctx.parents()]
+        p = [cp.node() for cp in ctx.parents()]
 
     displayer = cmdutil.show_changeset(ui, repo, opts)
     for n in p:
@@ -2044,12 +1914,12 @@
         for name, path in ui.configitems("paths"):
             ui.write("%s = %s\n" % (name, path))
 
-def postincoming(ui, repo, modheads, optupdate):
+def postincoming(ui, repo, modheads, optupdate, checkout):
     if modheads == 0:
         return
     if optupdate:
-        if modheads == 1:
-            return hg.update(repo, None)
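+        # an explicit checkout revision from hg.parseurl makes us update
+        # even when more than one new head has arrived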
+        if modheads <= 1 or checkout:
+            return hg.update(repo, checkout)
         else:
             ui.status(_("not updating, since new heads added\n"))
     if modheads > 1:
@@ -2098,20 +1968,21 @@
       Alternatively specify "ssh -C" as your ssh command in your hgrc or
       with the --ssh command line option.
     """
-    source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
+    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
     cmdutil.setremoteconfig(ui, opts)
 
     other = hg.repository(ui, source)
-    ui.status(_('pulling from %s\n') % (source))
+    ui.status(_('pulling from %s\n') % util.hidepassword(source))
     if revs:
-        if 'lookup' in other.capabilities:
+        try:
             revs = [other.lookup(rev) for rev in revs]
-        else:
-            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
+        except repo.NoCapability:
+            error = _("Other repository doesn't support revision lookup, "
+                      "so a rev cannot be specified.")
             raise util.Abort(error)
 
     modheads = repo.pull(other, heads=revs, force=opts['force'])
-    return postincoming(ui, repo, modheads, opts['update'])
+    return postincoming(ui, repo, modheads, opts['update'], checkout)
 
 def push(ui, repo, dest=None, **opts):
     """push changes to the specified destination
@@ -2143,12 +2014,12 @@
     Pushing to http:// and https:// URLs is only possible, if this
     feature is explicitly enabled on the remote Mercurial server.
     """
-    dest, revs = cmdutil.parseurl(
+    dest, revs, checkout = hg.parseurl(
         ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
     cmdutil.setremoteconfig(ui, opts)
 
     other = hg.repository(ui, dest)
-    ui.status('pushing to %s\n' % (dest))
+    ui.status('pushing to %s\n' % util.hidepassword(dest))
     if revs:
         revs = [repo.lookup(rev) for rev in revs]
     r = repo.push(other, opts['force'], revs=revs)
@@ -2228,6 +2099,7 @@
                 forget.append(abs)
                 continue
             reason = _('has been marked for add (use -f to force removal)')
+            exact = 1 # force the message
         elif abs not in repo.dirstate:
             reason = _('is not managed')
         elif opts['after'] and not exact and abs not in deleted:
@@ -2260,20 +2132,15 @@
     """
     wlock = repo.wlock(False)
     try:
-        errs, copied = docopy(ui, repo, pats, opts)
-        names = []
-        for abs, rel, exact in copied:
-            if ui.verbose or not exact:
-                ui.status(_('removing %s\n') % rel)
-            names.append(abs)
-        if not opts.get('dry_run'):
-            repo.remove(names, True)
-        return errs
+        return cmdutil.copy(ui, repo, pats, opts, rename=True)
     finally:
         del wlock
 
 def revert(ui, repo, *pats, **opts):
-    """revert files or dirs to their states as of some revision
+    """restore individual files or dirs to an earlier state
+
+    (use update -r to check out earlier revisions; revert does not
+    change the working dir parents)
 
     With no revision specified, revert the named files or directories
     to the contents they had in the parent of the working directory.
@@ -2282,12 +2149,9 @@
     working directory has two parents, you must explicitly specify the
     revision to revert to.
 
-    Modified files are saved with a .orig suffix before reverting.
-    To disable these backups, use --no-backup.
-
     Using the -r option, revert the given files or directories to their
     contents as of a specific revision. This can be helpful to "roll
-    back" some or all of a change that should not have been committed.
+    back" some or all of an earlier  change.
 
     Revert modifies the working directory.  It does not commit any
     changes, or change the parent of the working directory.  If you
@@ -2301,6 +2165,9 @@
     If names are given, all files matching the names are reverted.
 
     If no arguments are given, no files are reverted.
+
+    Modified files are saved with a .orig suffix before reverting.
+    To disable these backups, use --no-backup.
     """
 
     if opts["date"]:
@@ -2361,6 +2228,14 @@
         changes = repo.status(match=names.has_key)[:5]
         modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
 
+        # if f is a rename, also revert the source
+        cwd = repo.getcwd()
+        for f in added:
+            src = repo.dirstate.copied(f)
+            if src and src not in names and repo.dirstate[src] == 'r':
+                removed[src] = None
+                names[src] = (repo.pathto(src, cwd), True)
+
         revert = ([], _('reverting %s\n'))
         add = ([], _('adding %s\n'))
         remove = ([], _('removing %s\n'))
@@ -2443,10 +2318,12 @@
         del wlock
 
 def rollback(ui, repo):
-    """roll back the last transaction in this repository
-
-    Roll back the last transaction in this repository, restoring the
-    project to its state prior to the transaction.
+    """roll back the last transaction
+
+    This command should be used with care. There is only one level of
+    rollback, and there is no way to undo a rollback. It will also
+    restore the dirstate at the time of the last transaction, losing
+    any dirstate changes since that time.
 
     Transactions are used to encapsulate the effects of all commands
     that create new changesets or propagate existing changesets into a
@@ -2459,11 +2336,6 @@
       push (with this repository as destination)
       unbundle
 
-    This command should be used with care. There is only one level of
-    rollback, and there is no way to undo a rollback. It will also
-    restore the dirstate at the time of the last transaction, which
-    may lose subsequent dirstate changes.
-
     This command is not intended for use on public repositories. Once
     changes are visible for pull by other users, rolling a transaction
     back locally is ineffective (someone else may already have pulled
@@ -2497,12 +2369,12 @@
         s.serve_forever()
 
     parentui = ui.parentui or ui
-    optlist = ("name templates style address port ipv6"
+    optlist = ("name templates style address port prefix ipv6"
                " accesslog errorlog webdir_conf certificate")
     for o in optlist.split():
         if opts[o]:
             parentui.setconfig("web", o, str(opts[o]))
-            if repo.ui != parentui:
+            if (repo is not None) and (repo.ui != parentui):
                 repo.ui.setconfig("web", o, str(opts[o]))
 
     if repo is None and not ui.config("web", "webdir_conf"):
@@ -2519,11 +2391,17 @@
 
             if not ui.verbose: return
 
+            if self.httpd.prefix:
+                prefix = self.httpd.prefix.strip('/') + '/'
+            else:
+                prefix = ''
+
             if self.httpd.port != 80:
-                ui.status(_('listening at http://%s:%d/\n') %
-                          (self.httpd.addr, self.httpd.port))
+                ui.status(_('listening at http://%s:%d/%s\n') %
+                          (self.httpd.addr, self.httpd.port, prefix))
             else:
-                ui.status(_('listening at http://%s/\n') % self.httpd.addr)
+                ui.status(_('listening at http://%s/%s\n') %
+                          (self.httpd.addr, prefix))
 
         def run(self):
             self.httpd.serve_forever()
@@ -2536,8 +2414,10 @@
     """show changed files in the working directory
 
     Show status of files in the repository.  If names are given, only
-    files that match are shown.  Files that are clean or ignored, are
-    not listed unless -c (clean), -i (ignored) or -A is given.
+    files that match are shown.  Files that are clean or ignored or the
+    source of a copy/move operation are not listed unless -c (clean),
+    -i (ignored), -C (copies) or -A is given.  Unless options described
+    with "show only ..." are given, the options -mardu are used.
 
     NOTE: status may appear to disagree with diff if permissions have
     changed or a merge has occurred. The standard diff format does not
@@ -2554,7 +2434,7 @@
     C = clean
     ! = deleted, but still tracked
     ? = not tracked
-    I = ignored (not shown by default)
+    I = ignored
       = the previous added file was copied from here
     """
 
@@ -2626,8 +2506,15 @@
         rev_ = opts['rev']
     message = opts['message']
     if opts['remove']:
-        if not name in repo.tags():
+        tagtype = repo.tagtype(name)
+
+        if not tagtype:
             raise util.Abort(_('tag %s does not exist') % name)
+        if opts['local'] and tagtype == 'global':
+            raise util.Abort(_('%s tag is global') % name)
+        if not opts['local'] and tagtype == 'local':
+            raise util.Abort(_('%s tag is local') % name)
+
         rev_ = nullid
         if not message:
             message = _('Removed tag %s') % name
@@ -2649,23 +2536,33 @@
 
     List the repository tags.
 
-    This lists both regular and local tags.
+    This lists both regular and local tags. When the -v/--verbose switch
+    is used, a third column "local" is printed for local tags.
     """
 
     l = repo.tagslist()
     l.reverse()
     hexfunc = ui.debugflag and hex or short
+    tagtype = ""
+
     for t, n in l:
+        if ui.quiet:
+            ui.write("%s\n" % t)
+            continue
+
         try:
             hn = hexfunc(n)
-            r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
+            r = "%5d:%s" % (repo.changelog.rev(n), hn)
         except revlog.LookupError:
             r = "    ?:%s" % hn
-        if ui.quiet:
-            ui.write("%s\n" % t)
         else:
             spaces = " " * (30 - util.locallen(t))
-            ui.write("%s%s %s\n" % (t, spaces, r))
+            if ui.verbose:
+                if repo.tagtype(t) == 'local':
+                    tagtype = " local"
+                else:
+                    tagtype = ""
+            ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
 
 def tip(ui, repo, **opts):
     """show the tip revision
@@ -2689,7 +2586,7 @@
         gen = changegroup.readbundle(f, fname)
         modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
 
-    return postincoming(ui, repo, modheads, opts['update'])
+    return postincoming(ui, repo, modheads, opts['update'], None)
 
 def update(ui, repo, node=None, rev=None, clean=False, date=None):
     """update working directory
@@ -2740,7 +2637,7 @@
     ui.write(_("Mercurial Distributed SCM (version %s)\n")
              % version.get_version())
     ui.status(_(
-        "\nCopyright (C) 2005-2007 Matt Mackall <mpm@selenic.com> and others\n"
+        "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
         "This is free software; see the source for copying conditions. "
         "There is NO\nwarranty; "
         "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
@@ -2787,6 +2684,11 @@
     ('l', 'logfile', '', _('read commit message from <file>')),
 ]
 
+commitopts2 = [
+    ('d', 'date', '', _('record datecode as commit date')),
+    ('u', 'user', '', _('record user as committer')),
+]
+
 table = {
     "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
     "addremove":
@@ -2820,17 +2722,23 @@
         (backout,
          [('', 'merge', None,
            _('merge with old dirstate parent after backout')),
-          ('d', 'date', '', _('record datecode as commit date')),
           ('', 'parent', '', _('parent to choose when backing out merge')),
-          ('u', 'user', '', _('record user as committer')),
           ('r', 'rev', '', _('revision to backout')),
-         ] + walkopts + commitopts,
+         ] + walkopts + commitopts + commitopts2,
          _('hg backout [OPTION]... [-r] REV')),
+    "bisect":
+        (bisect,
+         [('r', 'reset', False, _('reset bisect state')),
+          ('g', 'good', False, _('mark changeset good')),
+          ('b', 'bad', False, _('mark changeset bad')),
+          ('s', 'skip', False, _('skip testing changeset')),
+          ('U', 'noupdate', False, _('do not update to target'))],
+         _("hg bisect [-gbsr] [REV]")),
     "branch":
         (branch,
          [('f', 'force', None,
            _('set branch name even if it shadows an existing branch'))],
-         _('hg branch [NAME]')),
+         _('hg branch [-f] [NAME]')),
     "branches":
         (branches,
          [('a', 'active', False,
@@ -2866,9 +2774,7 @@
         (commit,
          [('A', 'addremove', None,
            _('mark new/missing files as added/removed before committing')),
-          ('d', 'date', '', _('record datecode as commit date')),
-          ('u', 'user', '', _('record user as commiter')),
-         ] + walkopts + commitopts,
+         ] + walkopts + commitopts + commitopts2,
          _('hg commit [OPTION]... [FILE]...')),
     "copy|cp":
         (copy,
@@ -2877,31 +2783,41 @@
            _('forcibly copy over an existing managed file')),
          ] + walkopts + dryrunopts,
          _('hg copy [OPTION]... [SOURCE]... DEST')),
-    "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
+    "debugancestor": (debugancestor, [], _('hg debugancestor INDEX REV1 REV2')),
+    "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
     "debugcomplete":
         (debugcomplete,
          [('o', 'options', None, _('show the command options'))],
-         _('debugcomplete [-o] CMD')),
-    "debuginstall": (debuginstall, [], _('debuginstall')),
+         _('hg debugcomplete [-o] CMD')),
+    "debugdate":
+        (debugdate,
+         [('e', 'extended', None, _('try extended date formats'))],
+         _('hg debugdate [-e] DATE [RANGE]')),
+    "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
+    "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
+    "debugindex": (debugindex, [], _('hg debugindex FILE')),
+    "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
+    "debuginstall": (debuginstall, [], _('hg debuginstall')),
+    "debugrawcommit|rawcommit":
+        (rawcommit,
+         [('p', 'parent', [], _('parent')),
+          ('F', 'files', '', _('file list'))
+          ] + commitopts + commitopts2,
+         _('hg debugrawcommit [OPTION]... [FILE]...')),
     "debugrebuildstate":
         (debugrebuildstate,
          [('r', 'rev', '', _('revision to rebuild to'))],
-         _('debugrebuildstate [-r REV] [REV]')),
-    "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
-    "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
-    "debugstate": (debugstate, [], _('debugstate')),
-    "debugdate":
-        (debugdate,
-         [('e', 'extended', None, _('try extended date formats'))],
-         _('debugdate [-e] DATE [RANGE]')),
-    "debugdata": (debugdata, [], _('debugdata FILE REV')),
-    "debugindex": (debugindex, [], _('debugindex FILE')),
-    "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
+         _('hg debugrebuildstate [-r REV] [REV]')),
     "debugrename":
         (debugrename,
          [('r', 'rev', '', _('revision to debug'))],
-         _('debugrename [-r REV] FILE')),
-    "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
+         _('hg debugrename [-r REV] FILE')),
+    "debugsetparents":
+        (debugsetparents,
+         [],
+         _('hg debugsetparents REV1 [REV2]')),
+    "debugstate": (debugstate, [], _('hg debugstate')),
+    "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
     "^diff":
         (diff,
          [('r', 'rev', [], _('revision')),
@@ -2965,12 +2881,15 @@
           ('b', 'base', '', _('base path')),
           ('f', 'force', None,
            _('skip check for outstanding uncommitted changes')),
+          ('', 'no-commit', None, _("don't commit, just update the working directory")),
           ('', 'exact', None,
            _('apply patch to the nodes from which it was generated')),
           ('', 'import-branch', None,
-           _('Use any branch information in patch (implied by --exact)'))] + commitopts,
-         _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
-    "incoming|in": (incoming,
+           _('Use any branch information in patch (implied by --exact)'))] +
+         commitopts + commitopts2,
+         _('hg import [OPTION]... PATCH...')),
+    "incoming|in":
+        (incoming,
          [('M', 'no-merges', None, _('do not show merges')),
           ('f', 'force', None,
            _('run even when remote repository is unrelated')),
@@ -3016,14 +2935,18 @@
           ('', 'template', '', _('display with template')),
          ] + walkopts,
          _('hg log [OPTION]... [FILE]')),
-    "manifest": (manifest, [], _('hg manifest [REV]')),
+    "manifest":
+        (manifest,
+         [('r', 'rev', '', _('revision to display'))],
+         _('hg manifest [-r REV]')),
     "^merge":
         (merge,
          [('f', 'force', None, _('force a merge with outstanding changes')),
           ('r', 'rev', '', _('revision to merge')),
              ],
          _('hg merge [-f] [[-r] REV]')),
-    "outgoing|out": (outgoing,
+    "outgoing|out":
+        (outgoing,
          [('M', 'no-merges', None, _('do not show merges')),
           ('f', 'force', None,
            _('run even when remote repository is unrelated')),
@@ -3057,18 +2980,10 @@
           ('r', 'rev', [], _('a specific revision you would like to push')),
          ] + remoteopts,
          _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
-    "debugrawcommit|rawcommit":
-        (rawcommit,
-         [('p', 'parent', [], _('parent')),
-          ('d', 'date', '', _('date code')),
-          ('u', 'user', '', _('user')),
-          ('F', 'files', '', _('file list'))
-          ] + commitopts,
-         _('hg debugrawcommit [OPTION]... [FILE]...')),
     "recover": (recover, [], _('hg recover')),
     "^remove|rm":
         (remove,
-         [('A', 'after', None, _('record remove that has already occurred')),
+         [('A', 'after', None, _('record remove without deleting')),
           ('f', 'force', None, _('remove file even if modified')),
          ] + walkopts,
          _('hg remove [OPTION]... FILE...')),
@@ -3079,7 +2994,7 @@
            _('forcibly copy over an existing managed file')),
          ] + walkopts + dryrunopts,
          _('hg rename [OPTION]... SOURCE... DEST')),
-    "^revert":
+    "revert":
         (revert,
          [('a', 'all', None, _('revert all changes when no arguments given')),
           ('d', 'date', '', _('tipmost revision matching date')),
@@ -3089,10 +3004,6 @@
          _('hg revert [OPTION]... [-r REV] [NAME]...')),
     "rollback": (rollback, [], _('hg rollback')),
     "root": (root, [], _('hg root')),
-    "showconfig|debugconfig":
-        (showconfig,
-         [('u', 'untrusted', None, _('show untrusted configuration options'))],
-         _('showconfig [-u] [NAME]...')),
     "^serve":
         (serve,
          [('A', 'accesslog', '', _('name of access log file to write to')),
@@ -3101,6 +3012,7 @@
           ('E', 'errorlog', '', _('name of error log file to write to')),
           ('p', 'port', 0, _('port to use (default: 8000)')),
           ('a', 'address', '', _('address to use')),
+          ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
           ('n', 'name', '',
            _('name to show in web pages (default: working dir)')),
           ('', 'webdir-conf', '', _('name of the webdir config file'
@@ -3112,6 +3024,10 @@
           ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
           ('', 'certificate', '', _('SSL certificate file'))],
          _('hg serve [OPTION]...')),
+    "showconfig|debugconfig":
+        (showconfig,
+         [('u', 'untrusted', None, _('show untrusted configuration options'))],
+         _('hg showconfig [-u] [NAME]...')),
     "^status|st":
         (status,
          [('A', 'all', None, _('show status of all files')),
@@ -3133,11 +3049,11 @@
         (tag,
          [('f', 'force', None, _('replace existing tag')),
           ('l', 'local', None, _('make the tag local')),
-          ('m', 'message', '', _('message for tag commit log entry')),
-          ('d', 'date', '', _('record datecode as commit date')),
-          ('u', 'user', '', _('record user as commiter')),
           ('r', 'rev', '', _('revision to tag')),
-          ('', 'remove', None, _('remove a tag'))],
+          ('', 'remove', None, _('remove a tag')),
+          # -l/--local is already there, commitopts cannot be used
+          ('m', 'message', '', _('use <text> as commit message')),
+         ] + commitopts2,
          _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
     "tags": (tags, [], _('hg tags')),
     "tip":
@@ -3161,19 +3077,6 @@
     "version": (version_, [], _('hg version')),
 }
 
-extensions.commandtable = table
-
 norepo = ("clone init version help debugancestor debugcomplete debugdata"
-          " debugindex debugindexdot debugdate debuginstall")
-optionalrepo = ("paths serve showconfig")
-
-def dispatch(args, argv0=None):
-    try:
-        u = ui.ui(traceback='--traceback' in args)
-    except util.Abort, inst:
-        sys.stderr.write(_("abort: %s\n") % inst)
-        return -1
-    return cmdutil.runcatch(u, args, argv0=argv0)
-
-def run():
-    sys.exit(dispatch(sys.argv[1:], argv0=sys.argv[0]))
+          " debugindex debugindexdot debugdate debuginstall debugfsinfo")
+optionalrepo = ("identify paths serve showconfig")
--- a/mercurial/context.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/context.py	Wed Feb 06 19:57:52 2008 -0800
@@ -70,7 +70,7 @@
         a = self._manifest.keys()
         a.sort()
         for f in a:
-            return f
+            yield f
 
     def changeset(self): return self._changeset
     def manifest(self): return self._manifest
@@ -82,6 +82,7 @@
     def files(self): return self._changeset[3]
     def description(self): return self._changeset[4]
     def branch(self): return self._changeset[5].get("branch")
+    def extra(self): return self._changeset[5]
     def tags(self): return self._repo.nodetags(self._node)
 
     def parents(self):
@@ -94,20 +95,29 @@
         c = self._repo.changelog.children(self._node)
         return [changectx(self._repo, x) for x in c]
 
-    def filenode(self, path):
+    def _fileinfo(self, path):
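+        # resolve (filenode, flags) for path, preferring a manifest or
+        # manifestdelta already cached on this context before falling back
+        # to manifest.find()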
         if '_manifest' in self.__dict__:
             try:
-                return self._manifest[path]
+                return self._manifest[path], self._manifest.flags(path)
             except KeyError:
-                raise revlog.LookupError(_("'%s' not found in manifest") % path)
+                raise revlog.LookupError(path, _("'%s' not found in manifest") % path)
         if '_manifestdelta' in self.__dict__ or path in self.files():
             if path in self._manifestdelta:
-                return self._manifestdelta[path]
+                return self._manifestdelta[path], self._manifestdelta.flags(path)
         node, flag = self._repo.manifest.find(self._changeset[0], path)
         if not node:
-            raise revlog.LookupError(_("'%s' not found in manifest") % path)
+            raise revlog.LookupError(path, _("'%s' not found in manifest") % path)
+
+        return node, flag
 
-        return node
+    def filenode(self, path):
+        return self._fileinfo(path)[0]
+
+    def fileflags(self, path):
+        try:
+            return self._fileinfo(path)[1]
+        except revlog.LookupError:
+            return ''
 
     def filectx(self, path, fileid=None, filelog=None):
         """get a file context from this changeset"""
@@ -149,12 +159,11 @@
         if filelog:
             self._filelog = filelog
 
-        if fileid is None:
-            if changectx is None:
-                self._changeid = changeid
-            else:
-                self._changectx = changectx
-        else:
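+        # changeid, changectx and fileid are independent hints: record any
+        # that were supplied and let __getattr__ derive the missing ones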
+        if changeid is not None:
+            self._changeid = changeid
+        if changectx is not None:
+            self._changectx = changectx
+        if fileid is not None:
             self._fileid = fileid
 
     def __getattr__(self, name):
@@ -165,7 +174,10 @@
             self._filelog = self._repo.file(self._path)
             return self._filelog
         elif name == '_changeid':
-            self._changeid = self._filelog.linkrev(self._filenode)
+            if '_changectx' in self.__dict__:
+                self._changeid = self._changectx.rev()
+            else:
+                self._changeid = self._filelog.linkrev(self._filenode)
             return self._changeid
         elif name == '_filenode':
             if '_fileid' in self.__dict__:
@@ -211,13 +223,19 @@
 
     def filerev(self): return self._filerev
     def filenode(self): return self._filenode
+    def fileflags(self): return self._changectx.fileflags(self._path)
+    def isexec(self): return 'x' in self.fileflags()
+    def islink(self): return 'l' in self.fileflags()
     def filelog(self): return self._filelog
 
     def rev(self):
         if '_changectx' in self.__dict__:
             return self._changectx.rev()
+        if '_changeid' in self.__dict__:
+            return self._changectx.rev()
         return self._filelog.linkrev(self._filenode)
 
+    def linkrev(self): return self._filelog.linkrev(self._filenode)
     def node(self): return self._changectx.node()
     def user(self): return self._changectx.user()
     def date(self): return self._changectx.date()
@@ -228,18 +246,42 @@
     def changectx(self): return self._changectx
 
     def data(self): return self._filelog.read(self._filenode)
-    def renamed(self): return self._filelog.renamed(self._filenode)
     def path(self): return self._path
     def size(self): return self._filelog.size(self._filerev)
 
     def cmp(self, text): return self._filelog.cmp(self._filenode, text)
 
+    def renamed(self):
+        """check if file was actually renamed in this changeset revision
+
+        If a rename is logged in the file revision, we report the copy for
+        the changeset only if the file revision's linkrev points back to the
+        changeset in question or if both changeset parents contain different
+        file revisions.
+        """
+
+        renamed = self._filelog.renamed(self._filenode)
+        if not renamed:
+            return renamed
+
+        if self.rev() == self.linkrev():
+            return renamed
+
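+        # linkrev points to another changeset: only report the copy if
+        # neither parent already carries this exact file revision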
+        name = self.path()
+        fnode = self._filenode
+        for p in self._changectx.parents():
+            try:
+                if fnode == p.filenode(name):
+                    return None
+            except revlog.LookupError:
+                pass
+        return renamed
+
     def parents(self):
         p = self._path
         fl = self._filelog
         pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
 
-        r = self.renamed()
+        r = self._filelog.renamed(self._filenode)
         if r:
             pl[0] = (r[0], r[1], None)
 
@@ -305,7 +347,7 @@
             return [getctx(p, n) for p, n in pl if n != nullrev]
 
         # use linkrev to find the first changeset where self appeared
-        if self.rev() != self._filelog.linkrev(self._filenode):
+        if self.rev() != self.linkrev():
             base = self.filectx(self.filerev())
         else:
             base = self
@@ -414,9 +456,11 @@
         """generate a manifest corresponding to the working directory"""
 
         man = self._parents[0].manifest().copy()
-        is_exec = util.execfunc(self._repo.root, man.execf)
-        is_link = util.linkfunc(self._repo.root, man.linkf)
         copied = self._repo.dirstate.copies()
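+        # when the parent manifest has to be consulted for flags, files
+        # recorded as copies are looked up under their copy source's name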
+        is_exec = util.execfunc(self._repo.root,
+                                lambda p: man.execf(copied.get(p,p)))
+        is_link = util.linkfunc(self._repo.root,
+                                lambda p: man.linkf(copied.get(p,p)))
         modified, added, removed, deleted, unknown = self._status[:5]
         for i, l in (("a", added), ("m", modified), ("u", unknown)):
             for f in l:
@@ -462,6 +506,27 @@
     def children(self):
         return []
 
+    def fileflags(self, path):
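+        # working-dir flags: use the cached manifest when present, otherwise
+        # probe the filesystem and fall back to the flags the first parent's
+        # manifest records for the file (or its copy source)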
+        if '_manifest' in self.__dict__:
+            try:
+                return self._manifest.flags(path)
+            except KeyError:
+                return ''
+
+        pnode = self._parents[0].changeset()[0]
+        orig = self._repo.dirstate.copies().get(path, path)
+        node, flag = self._repo.manifest.find(pnode, orig)
+        is_link = util.linkfunc(self._repo.root, lambda p: 'l' in flag)
+        is_exec = util.execfunc(self._repo.root, lambda p: 'x' in flag)
+        try:
+            return (is_link(path) and 'l' or '') + (is_exec(path) and 'e' or '')
+        except OSError:
+            pass
+
+        if not node or path in self.deleted() or path in self.removed():
+            return ''
+        return flag
+
     def filectx(self, path, filelog=None):
         """get a file context from the working directory"""
         return workingfilectx(self._repo, path, workingctx=self,
--- a/mercurial/demandimport.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/demandimport.py	Wed Feb 06 19:57:52 2008 -0800
@@ -67,7 +67,7 @@
             return "<proxied module '%s'>" % self._data[0]
         return "<unloaded module '%s'>" % self._data[0]
     def __call__(self, *args, **kwargs):
-        raise TypeError("'unloaded module' object is not callable")
+        raise TypeError("%s object is not callable" % repr(self))
     def __getattribute__(self, attr):
         if attr in ('_data', '_extend', '_load', '_module'):
             return object.__getattribute__(self, attr)
@@ -77,7 +77,7 @@
         self._load()
         setattr(self._module, attr, val)
 
-def _demandimport(name, globals=None, locals=None, fromlist=None):
+def _demandimport(name, globals=None, locals=None, fromlist=None, level=None):
     if not locals or name in ignore or fromlist == ('*',):
         # these cases we can't really delay
         return _origimport(name, globals, locals, fromlist)
@@ -95,6 +95,9 @@
                 return locals[base]
         return _demandmod(name, globals, locals)
     else:
+        if level is not None:
+            # from . import b,c,d or from .a import b,c,d
+            return _origimport(name, globals, locals, fromlist, level)
         # from a import b,c,d
         mod = _origimport(name, globals, locals)
         # recurse down the module chain
@@ -108,7 +111,18 @@
                 setattr(mod, x, _demandmod(x, mod.__dict__, locals))
         return mod
 
-ignore = ['_hashlib', '_xmlplus', 'fcntl', 'win32com.gen_py']
+ignore = [
+    '_hashlib',
+    '_xmlplus',
+    'fcntl',
+    'win32com.gen_py',
+    # imported by tarfile, not available under Windows
+    'pwd',
+    'grp',
+    # imported by profile, itself imported by hotshot.stats,
+    # not available under Windows
+    'resource',
+    ]
 
 def enable():
     "enable global demand-loading of modules"
--- a/mercurial/diffhelpers.c	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/diffhelpers.c	Wed Feb 06 19:57:52 2008 -0800
@@ -83,6 +83,12 @@
 				_fix_newline(hunk, a, b);
 				continue;
 			}
+			if (c == '\n') {
+				/* Some patches may be missing the control char
+				 * on empty lines. Supply a leading space. */
+				Py_DECREF(x);
+				x = PyString_FromString(" \n");
+			}
 			PyList_Append(hunk, x);
 			if (c == '+') {
 				l = PyString_FromString(s + 1);
--- a/mercurial/dirstate.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/dirstate.py	Wed Feb 06 19:57:52 2008 -0800
@@ -10,7 +10,7 @@
 from node import *
 from i18n import _
 import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
-import cStringIO
+import cStringIO, osutil
 
 _unknown = ('?', 0, 0, 0)
 _format = ">cllll"
@@ -50,7 +50,8 @@
         elif name == '_dirs':
             self._dirs = {}
             for f in self._map:
-                self._incpath(f)
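+                # entries marked 'r' (removed) must not keep their parent
+                # directories alive in the _dirs map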
+                if self[f] != 'r':
+                    self._incpath(f)
             return self._dirs
         elif name == '_ignore':
             files = [self._join('.hgignore')]
@@ -73,7 +74,7 @@
         if cwd == self._root: return ''
         # self._root ends with a path separator if self._root is '/' or 'C:\'
         rootsep = self._root
-        if not rootsep.endswith(os.sep):
+        if not util.endswithsep(rootsep):
             rootsep += os.sep
         if cwd.startswith(rootsep):
             return cwd[len(rootsep):]
@@ -86,7 +87,7 @@
             cwd = self.getcwd()
         path = util.pathto(self._root, cwd, f)
         if self._slash:
-            return path.replace(os.sep, '/')
+            return util.normpath(path)
         return path
 
     def __getitem__(self, key):
@@ -141,22 +142,20 @@
         dmap = self._map
         copymap = self._copymap
         unpack = struct.unpack
-
-        pos = 40
         e_size = struct.calcsize(_format)
+        pos1 = 40
+        l = len(st)
 
-        while pos < len(st):
-            newpos = pos + e_size
-            e = unpack(_format, st[pos:newpos])
-            l = e[4]
-            pos = newpos
-            newpos = pos + l
-            f = st[pos:newpos]
+        # the inner loop
+        while pos1 < l:
+            pos2 = pos1 + e_size
+            e = unpack(">cllll", st[pos1:pos2]) # a literal here is faster
+            pos1 = pos2 + e[4]
+            f = st[pos2:pos1]
             if '\0' in f:
                 f, c = f.split('\0')
                 copymap[f] = c
-            dmap[f] = e[:4]
-            pos = newpos
+            dmap[f] = e # we hold onto e[4] because making a subtuple is slow
 
     def invalidate(self):
         for a in "_map _copymap _branch _pl _dirs _ignore".split():
@@ -175,60 +174,99 @@
         return self._copymap
 
     def _incpath(self, path):
-        for c in strutil.findall(path, '/'):
-            pc = path[:c]
-            self._dirs.setdefault(pc, 0)
-            self._dirs[pc] += 1
+        c = path.rfind('/')
+        if c >= 0:
+            dirs = self._dirs
+            base = path[:c]
+            if base not in dirs:
+                self._incpath(base)
+                dirs[base] = 1
+            else:
+                dirs[base] += 1
 
     def _decpath(self, path):
-        for c in strutil.findall(path, '/'):
-            pc = path[:c]
-            self._dirs.setdefault(pc, 0)
-            self._dirs[pc] -= 1
+        c = path.rfind('/')
+        if c >= 0:
+            base = path[:c]
+            dirs = self._dirs
+            if dirs[base] == 1:
+                del dirs[base]
+                self._decpath(base)
+            else:
+                dirs[base] -= 1
 
     def _incpathcheck(self, f):
         if '\r' in f or '\n' in f:
             raise util.Abort(_("'\\n' and '\\r' disallowed in filenames"))
         # shadows
         if f in self._dirs:
-            raise util.Abort(_('directory named %r already in dirstate') % f)
+            raise util.Abort(_('directory %r already in dirstate') % f)
         for c in strutil.rfindall(f, '/'):
             d = f[:c]
             if d in self._dirs:
                 break
-            if d in self._map:
-                raise util.Abort(_('file named %r already in dirstate') % d)
+            if d in self._map and self[d] != 'r':
+                raise util.Abort(_('file %r in dirstate clashes with %r') %
+                                 (d, f))
         self._incpath(f)
 
+    def _changepath(self, f, newstate, relaxed=False):
+        # handle upcoming path changes
+        oldstate = self[f]
+        if oldstate not in "?r" and newstate in "?r":
+            if "_dirs" in self.__dict__:
+                self._decpath(f)
+            return
+        if oldstate in "?r" and newstate not in "?r":
+            if relaxed and oldstate == '?':
+                # XXX
+                # in relaxed mode we assume the caller knows
+                # what it is doing, workaround for updating
+                # dir-to-file revisions
+                if "_dirs" in self.__dict__:
+                    self._incpath(f)
+                return
+            self._incpathcheck(f)
+            return
+
     def normal(self, f):
-        'mark a file normal'
+        'mark a file normal and clean'
         self._dirty = True
+        self._changepath(f, 'n', True)
         s = os.lstat(self._join(f))
-        self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime)
-        if self._copymap.has_key(f):
+        self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime, 0)
+        if f in self._copymap:
+            del self._copymap[f]
+
+    def normallookup(self, f):
+        'mark a file normal, but possibly dirty'
+        self._dirty = True
+        self._changepath(f, 'n', True)
+        self._map[f] = ('n', 0, -1, -1, 0)
+        if f in self._copymap:
             del self._copymap[f]
 
     def normaldirty(self, f):
-        'mark a file normal, but possibly dirty'
+        'mark a file normal, but dirty'
         self._dirty = True
-        s = os.lstat(self._join(f))
-        self._map[f] = ('n', s.st_mode, -1, -1)
+        self._changepath(f, 'n', True)
+        self._map[f] = ('n', 0, -2, -1, 0)
         if f in self._copymap:
             del self._copymap[f]
 
     def add(self, f):
         'mark a file added'
         self._dirty = True
-        self._incpathcheck(f)
-        self._map[f] = ('a', 0, -1, -1)
+        self._changepath(f, 'a')
+        self._map[f] = ('a', 0, -1, -1, 0)
         if f in self._copymap:
             del self._copymap[f]
 
     def remove(self, f):
         'mark a file removed'
         self._dirty = True
-        self._map[f] = ('r', 0, 0, 0)
-        self._decpath(f)
+        self._changepath(f, 'r')
+        self._map[f] = ('r', 0, 0, 0, 0)
         if f in self._copymap:
             del self._copymap[f]
 
@@ -236,7 +274,8 @@
         'mark a file merged'
         self._dirty = True
         s = os.lstat(self._join(f))
-        self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime)
+        self._changepath(f, 'm', True)
+        self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime, 0)
         if f in self._copymap:
             del self._copymap[f]
 
@@ -244,18 +283,26 @@
         'forget a file'
         self._dirty = True
         try:
+            self._changepath(f, '?')
             del self._map[f]
-            self._decpath(f)
         except KeyError:
             self._ui.warn(_("not in dirstate: %s!\n") % f)
 
+    def clear(self):
+        self._map = {}
+        if "_dirs" in self.__dict__:
+            delattr(self, "_dirs");
+        self._copymap = {}
+        self._pl = [nullid, nullid]
+        self._dirty = True
+
     def rebuild(self, parent, files):
-        self.invalidate()
+        self.clear()
         for f in files:
             if files.execf(f):
-                self._map[f] = ('n', 0777, -1, 0)
+                self._map[f] = ('n', 0777, -1, 0, 0)
             else:
-                self._map[f] = ('n', 0666, -1, 0)
+                self._map[f] = ('n', 0666, -1, 0, 0)
         self._pl = (parent, nullid)
         self._dirty = True
 
@@ -263,14 +310,16 @@
         if not self._dirty:
             return
         cs = cStringIO.StringIO()
-        cs.write("".join(self._pl))
+        copymap = self._copymap
+        pack = struct.pack
+        write = cs.write
+        write("".join(self._pl))
         for f, e in self._map.iteritems():
-            c = self.copied(f)
-            if c:
-                f = f + "\0" + c
-            e = struct.pack(_format, e[0], e[1], e[2], e[3], len(f))
-            cs.write(e)
-            cs.write(f)
+            if f in copymap:
+                f = "%s\0%s" % (f, copymap[f])
+            e = pack(_format, e[0], e[1], e[2], e[3], len(f))
+            write(e)
+            write(f)
         st = self._opener("dirstate", "w", atomictemp=True)
         st.write(cs.getvalue())
         st.rename()
@@ -306,16 +355,16 @@
                 bs += 1
         return ret
 
-    def _supported(self, f, st, verbose=False):
-        if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode):
+    def _supported(self, f, mode, verbose=False):
+        if stat.S_ISREG(mode) or stat.S_ISLNK(mode):
             return True
         if verbose:
             kind = 'unknown'
-            if stat.S_ISCHR(st.st_mode): kind = _('character device')
-            elif stat.S_ISBLK(st.st_mode): kind = _('block device')
-            elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
-            elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
-            elif stat.S_ISDIR(st.st_mode): kind = _('directory')
+            if stat.S_ISCHR(mode): kind = _('character device')
+            elif stat.S_ISBLK(mode): kind = _('block device')
+            elif stat.S_ISFIFO(mode): kind = _('fifo')
+            elif stat.S_ISSOCK(mode): kind = _('socket')
+            elif stat.S_ISDIR(mode): kind = _('directory')
             self._ui.warn(_('%s: unsupported file type (type is %s)\n')
                           % (self.pathto(f), kind))
         return False
@@ -361,61 +410,74 @@
 
         # self._root may end with a path separator when self._root == '/'
         common_prefix_len = len(self._root)
-        if not self._root.endswith(os.sep):
+        if not util.endswithsep(self._root):
             common_prefix_len += 1
+
+        normpath = util.normpath
+        listdir = osutil.listdir
+        lstat = os.lstat
+        bisect_left = bisect.bisect_left
+        isdir = os.path.isdir
+        pconvert = util.pconvert
+        join = os.path.join
+        s_isdir = stat.S_ISDIR
+        supported = self._supported
+        _join = self._join
+        known = {'.hg': 1}
+
         # recursion free walker, faster than os.walk.
         def findfiles(s):
             work = [s]
+            wadd = work.append
+            found = []
+            add = found.append
             if directories:
-                yield 'd', util.normpath(s[common_prefix_len:]), os.lstat(s)
+                add((normpath(s[common_prefix_len:]), 'd', lstat(s)))
             while work:
                 top = work.pop()
-                names = os.listdir(top)
-                names.sort()
+                entries = listdir(top, stat=True)
                 # nd is the top of the repository dir tree
-                nd = util.normpath(top[common_prefix_len:])
+                nd = normpath(top[common_prefix_len:])
                 if nd == '.':
                     nd = ''
                 else:
                     # do not recurse into a repo contained in this
                     # one. use bisect to find .hg directory so speed
                     # is good on big directory.
-                    hg = bisect.bisect_left(names, '.hg')
+                    names = [e[0] for e in entries]
+                    hg = bisect_left(names, '.hg')
                     if hg < len(names) and names[hg] == '.hg':
-                        if os.path.isdir(os.path.join(top, '.hg')):
+                        if isdir(join(top, '.hg')):
                             continue
-                for f in names:
-                    np = util.pconvert(os.path.join(nd, f))
-                    if seen(np):
+                for f, kind, st in entries:
+                    np = pconvert(join(nd, f))
+                    if np in known:
                         continue
-                    p = os.path.join(top, f)
+                    known[np] = 1
+                    p = join(top, f)
                     # don't trip over symlinks
-                    st = os.lstat(p)
-                    if stat.S_ISDIR(st.st_mode):
+                    if kind == stat.S_IFDIR:
                         if not ignore(np):
-                            work.append(p)
+                            wadd(p)
                             if directories:
-                                yield 'd', np, st
-                        if imatch(np) and np in dc:
-                            yield 'm', np, st
+                                add((np, 'd', st))
+                        if np in dc and match(np):
+                            add((np, 'm', st))
                     elif imatch(np):
-                        if self._supported(np, st):
-                            yield 'f', np, st
+                        if supported(np, st.st_mode):
+                            add((np, 'f', st))
                         elif np in dc:
-                            yield 'm', np, st
-
-        known = {'.hg': 1}
-        def seen(fn):
-            if fn in known: return True
-            known[fn] = 1
+                            add((np, 'm', st))
+            found.sort()
+            return found
 
         # step one, find all files that match our criteria
         files.sort()
         for ff in files:
-            nf = util.normpath(ff)
-            f = self._join(ff)
+            nf = normpath(ff)
+            f = _join(ff)
             try:
-                st = os.lstat(f)
+                st = lstat(f)
             except OSError, inst:
                 found = False
                 for fn in dc:
@@ -429,15 +491,15 @@
                     elif badmatch and badmatch(ff) and imatch(nf):
                         yield 'b', ff, None
                 continue
-            if stat.S_ISDIR(st.st_mode):
-                cmp1 = (lambda x, y: cmp(x[1], y[1]))
-                sorted_ = [ x for x in findfiles(f) ]
-                sorted_.sort(cmp1)
-                for e in sorted_:
-                    yield e
+            if s_isdir(st.st_mode):
+                for f, src, st in findfiles(f):
+                    yield src, f, st
             else:
-                if not seen(nf) and match(nf):
-                    if self._supported(ff, st, verbose=True):
+                if nf in known:
+                    continue
+                known[nf] = 1
+                if match(nf):
+                    if supported(ff, st.st_mode, verbose=True):
                         yield 'f', nf, st
                     elif ff in dc:
                         yield 'm', nf, st
@@ -447,57 +509,74 @@
         ks = dc.keys()
         ks.sort()
         for k in ks:
-            if not seen(k) and imatch(k):
+            if k in known:
+                continue
+            known[k] = 1
+            if imatch(k):
                 yield 'm', k, None
 
     def status(self, files, match, list_ignored, list_clean):
         lookup, modified, added, unknown, ignored = [], [], [], [], []
         removed, deleted, clean = [], [], []
 
+        _join = self._join
+        lstat = os.lstat
+        cmap = self._copymap
+        dmap = self._map
+        ladd = lookup.append
+        madd = modified.append
+        aadd = added.append
+        uadd = unknown.append
+        iadd = ignored.append
+        radd = removed.append
+        dadd = deleted.append
+        cadd = clean.append
+
         for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
-            try:
-                type_, mode, size, time = self._map[fn]
-            except KeyError:
+            if fn in dmap:
+                type_, mode, size, time, foo = dmap[fn]
+            else:
                 if list_ignored and self._ignore(fn):
-                    ignored.append(fn)
+                    iadd(fn)
                 else:
-                    unknown.append(fn)
+                    uadd(fn)
                 continue
             if src == 'm':
                 nonexistent = True
                 if not st:
                     try:
-                        st = os.lstat(self._join(fn))
+                        st = lstat(_join(fn))
                     except OSError, inst:
-                        if inst.errno != errno.ENOENT:
+                        if inst.errno not in (errno.ENOENT, errno.ENOTDIR):
                             raise
                         st = None
                     # We need to re-check that it is a valid file
-                    if st and self._supported(fn, st):
+                    if st and self._supported(fn, st.st_mode):
                         nonexistent = False
                 # XXX: what to do with file no longer present in the fs
                 # who are not removed in the dirstate ?
                 if nonexistent and type_ in "nm":
-                    deleted.append(fn)
+                    dadd(fn)
                     continue
             # check the common case first
             if type_ == 'n':
                 if not st:
-                    st = os.lstat(self._join(fn))
+                    st = lstat(_join(fn))
                 if (size >= 0 and (size != st.st_size
                                    or (mode ^ st.st_mode) & 0100)
+                    or size == -2
                     or fn in self._copymap):
-                    modified.append(fn)
+                    madd(fn)
                 elif time != int(st.st_mtime):
-                    lookup.append(fn)
+                    ladd(fn)
                 elif list_clean:
-                    clean.append(fn)
+                    cadd(fn)
             elif type_ == 'm':
-                modified.append(fn)
+                madd(fn)
             elif type_ == 'a':
-                added.append(fn)
+                aadd(fn)
             elif type_ == 'r':
-                removed.append(fn)
+                radd(fn)
 
         return (lookup, modified, added, removed, deleted, unknown, ignored,
                 clean)
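
Several of the dirstate hunks above revolve around the on-disk format:
40 bytes of parent hashes, then records packed with struct format '>cllll'
(state, mode, size, mtime, filename length) followed by the filename, with an
optional NUL-separated copy source. A standalone decoding sketch (hypothetical
helper, bytes in, plain dicts out) looks like this:

    import struct

    def parse_dirstate(data):
        """Decode the dirstate layout described above: 20+20 bytes of
        parents, then '>cllll' records followed by a filename field whose
        length is the fifth struct member."""
        parents = (data[:20], data[20:40])
        dmap, copymap = {}, {}
        e_size = struct.calcsize(">cllll")
        pos = 40
        while pos < len(data):
            end = pos + e_size
            e = struct.unpack(">cllll", data[pos:end])
            pos = end + e[4]
            f = data[end:pos]
            if b"\0" in f:
                f, copy = f.split(b"\0")
                copymap[f] = copy
            dmap[f] = e[:4]  # the hunk above keeps the full 5-tuple for speed
        return parents, dmap, copymap

    # round-trip a single 'normal' entry for the file b'a'
    entry = struct.pack(">cllll", b"n", 0o644, 5, 0, 1) + b"a"
    parents, dmap, copymap = parse_dirstate(b"\0" * 40 + entry)
    assert dmap == {b"a": (b"n", 0o644, 5, 0)}
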
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/dispatch.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,424 @@
+# dispatch.py - command dispatching for mercurial
+#
+# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from node import *
+from i18n import _
+import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex, time
+import util, commands, hg, lock, fancyopts, revlog, version, extensions, hook
+import cmdutil
+import ui as _ui
+
+class ParseError(Exception):
+    """Exception raised on errors in parsing the command line."""
+
+def run():
+    "run the command in sys.argv"
+    sys.exit(dispatch(sys.argv[1:]))
+
+def dispatch(args):
+    "run the command specified in args"
+    try:
+        u = _ui.ui(traceback='--traceback' in args)
+    except util.Abort, inst:
+        sys.stderr.write(_("abort: %s\n") % inst)
+        return -1
+    return _runcatch(u, args)
+
+def _runcatch(ui, args):
+    def catchterm(*args):
+        raise util.SignalInterrupt
+
+    for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
+        num = getattr(signal, name, None)
+        if num: signal.signal(num, catchterm)
+
+    try:
+        try:
+            # enter the debugger before command execution
+            if '--debugger' in args:
+                pdb.set_trace()
+            try:
+                return _dispatch(ui, args)
+            finally:
+                ui.flush()
+        except:
+            # enter the debugger when we hit an exception
+            if '--debugger' in args:
+                pdb.post_mortem(sys.exc_info()[2])
+            ui.print_exc()
+            raise
+
+    except ParseError, inst:
+        if inst.args[0]:
+            ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
+            commands.help_(ui, inst.args[0])
+        else:
+            ui.warn(_("hg: %s\n") % inst.args[1])
+            commands.help_(ui, 'shortlist')
+    except cmdutil.AmbiguousCommand, inst:
+        ui.warn(_("hg: command '%s' is ambiguous:\n    %s\n") %
+                (inst.args[0], " ".join(inst.args[1])))
+    except cmdutil.UnknownCommand, inst:
+        ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
+        commands.help_(ui, 'shortlist')
+    except hg.RepoError, inst:
+        ui.warn(_("abort: %s!\n") % inst)
+    except lock.LockHeld, inst:
+        if inst.errno == errno.ETIMEDOUT:
+            reason = _('timed out waiting for lock held by %s') % inst.locker
+        else:
+            reason = _('lock held by %s') % inst.locker
+        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
+    except lock.LockUnavailable, inst:
+        ui.warn(_("abort: could not lock %s: %s\n") %
+               (inst.desc or inst.filename, inst.strerror))
+    except revlog.RevlogError, inst:
+        ui.warn(_("abort: %s!\n") % inst)
+    except util.SignalInterrupt:
+        ui.warn(_("killed!\n"))
+    except KeyboardInterrupt:
+        try:
+            ui.warn(_("interrupted!\n"))
+        except IOError, inst:
+            if inst.errno == errno.EPIPE:
+                if ui.debugflag:
+                    ui.warn(_("\nbroken pipe\n"))
+            else:
+                raise
+    except socket.error, inst:
+        ui.warn(_("abort: %s\n") % inst[1])
+    except IOError, inst:
+        if hasattr(inst, "code"):
+            ui.warn(_("abort: %s\n") % inst)
+        elif hasattr(inst, "reason"):
+            try: # usually it is in the form (errno, strerror)
+                reason = inst.reason.args[1]
+            except: # it might be anything, for example a string
+                reason = inst.reason
+            ui.warn(_("abort: error: %s\n") % reason)
+        elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
+            if ui.debugflag:
+                ui.warn(_("broken pipe\n"))
+        elif getattr(inst, "strerror", None):
+            if getattr(inst, "filename", None):
+                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
+            else:
+                ui.warn(_("abort: %s\n") % inst.strerror)
+        else:
+            raise
+    except OSError, inst:
+        if getattr(inst, "filename", None):
+            ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
+        else:
+            ui.warn(_("abort: %s\n") % inst.strerror)
+    except util.UnexpectedOutput, inst:
+        ui.warn(_("abort: %s") % inst[0])
+        if not isinstance(inst[1], basestring):
+            ui.warn(" %r\n" % (inst[1],))
+        elif not inst[1]:
+            ui.warn(_(" empty string\n"))
+        else:
+            ui.warn("\n%r\n" % util.ellipsis(inst[1]))
+    except ImportError, inst:
+        m = str(inst).split()[-1]
+        ui.warn(_("abort: could not import module %s!\n") % m)
+        if m in "mpatch bdiff".split():
+            ui.warn(_("(did you forget to compile extensions?)\n"))
+        elif m in "zlib".split():
+            ui.warn(_("(is your Python install correct?)\n"))
+
+    except util.Abort, inst:
+        ui.warn(_("abort: %s\n") % inst)
+    except MemoryError:
+        ui.warn(_("abort: out of memory\n"))
+    except SystemExit, inst:
+        # Commands shouldn't sys.exit directly, but give a return code.
+        # Just in case, catch this and pass the exit code to the caller.
+        return inst.code
+    except:
+        ui.warn(_("** unknown exception encountered, details follow\n"))
+        ui.warn(_("** report bug details to "
+                 "http://www.selenic.com/mercurial/bts\n"))
+        ui.warn(_("** or mercurial@selenic.com\n"))
+        ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
+               % version.get_version())
+        raise
+
+    return -1
+
+def _findrepo():
+    p = os.getcwd()
+    while not os.path.isdir(os.path.join(p, ".hg")):
+        oldp, p = p, os.path.dirname(p)
+        if p == oldp:
+            return None
+
+    return p
+
+def _parse(ui, args):
+    options = {}
+    cmdoptions = {}
+
+    try:
+        args = fancyopts.fancyopts(args, commands.globalopts, options)
+    except fancyopts.getopt.GetoptError, inst:
+        raise ParseError(None, inst)
+
+    if args:
+        cmd, args = args[0], args[1:]
+        aliases, i = cmdutil.findcmd(ui, cmd, commands.table)
+        cmd = aliases[0]
+        defaults = ui.config("defaults", cmd)
+        if defaults:
+            args = shlex.split(defaults) + args
+        c = list(i[1])
+    else:
+        cmd = None
+        c = []
+
+    # combine global options into local
+    for o in commands.globalopts:
+        c.append((o[0], o[1], options[o[1]], o[3]))
+
+    try:
+        args = fancyopts.fancyopts(args, c, cmdoptions)
+    except fancyopts.getopt.GetoptError, inst:
+        raise ParseError(cmd, inst)
+
+    # separate global options back out
+    for o in commands.globalopts:
+        n = o[1]
+        options[n] = cmdoptions[n]
+        del cmdoptions[n]
+
+    return (cmd, cmd and i[0] or None, args, options, cmdoptions)
+
+def _parseconfig(config):
+    """parse the --config options from the command line"""
+    parsed = []
+    for cfg in config:
+        try:
+            name, value = cfg.split('=', 1)
+            section, name = name.split('.', 1)
+            if not section or not name:
+                raise IndexError
+            parsed.append((section, name, value))
+        except (IndexError, ValueError):
+            raise util.Abort(_('malformed --config option: %s') % cfg)
+    return parsed
+
+def _earlygetopt(aliases, args):
+    """Return list of values for an option (or aliases).
+
+    The values are listed in the order they appear in args.
+    The options and values are removed from args.
+    """
+    try:
+        argcount = args.index("--")
+    except ValueError:
+        argcount = len(args)
+    shortopts = [opt for opt in aliases if len(opt) == 2]
+    values = []
+    pos = 0
+    while pos < argcount:
+        if args[pos] in aliases:
+            if pos + 1 >= argcount:
+                # ignore and let getopt report an error if there is no value
+                break
+            del args[pos]
+            values.append(args.pop(pos))
+            argcount -= 2
+        elif args[pos][:2] in shortopts:
+            # short option can have no following space, e.g. hg log -Rfoo
+            values.append(args.pop(pos)[2:])
+            argcount -= 1
+        else:
+            pos += 1
+    return values
+
+_loaded = {}
+def _dispatch(ui, args):
+    # read --config before doing anything else
+    # (e.g. to change trust settings for reading .hg/hgrc)
+    config = _earlygetopt(['--config'], args)
+    if config:
+        ui.updateopts(config=_parseconfig(config))
+
+    # check for cwd
+    cwd = _earlygetopt(['--cwd'], args)
+    if cwd:
+        os.chdir(cwd[-1])
+
+    # read the local repository .hgrc into a local ui object
+    path = _findrepo() or ""
+    if not path:
+        lui = ui
+    if path:
+        try:
+            lui = _ui.ui(parentui=ui)
+            lui.readconfig(os.path.join(path, ".hg", "hgrc"))
+        except IOError:
+            pass
+
+    # now we can expand paths, even ones in .hg/hgrc
+    rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
+    if rpath:
+        path = lui.expandpath(rpath[-1])
+        lui = _ui.ui(parentui=ui)
+        lui.readconfig(os.path.join(path, ".hg", "hgrc"))
+
+    extensions.loadall(lui)
+    for name, module in extensions.extensions():
+        if name in _loaded:
+            continue
+
+        # setup extensions
+        # TODO this should be generalized to scheme, where extensions can
+        #      redepend on other extensions.  then we should toposort them, and
+        #      do initialization in correct order
+        extsetup = getattr(module, 'extsetup', None)
+        if extsetup:
+            extsetup()
+
+        cmdtable = getattr(module, 'cmdtable', {})
+        overrides = [cmd for cmd in cmdtable if cmd in commands.table]
+        if overrides:
+            ui.warn(_("extension '%s' overrides commands: %s\n")
+                    % (name, " ".join(overrides)))
+        commands.table.update(cmdtable)
+        _loaded[name] = 1
+    # check for fallback encoding
+    fallback = lui.config('ui', 'fallbackencoding')
+    if fallback:
+        util._fallbackencoding = fallback
+
+    fullargs = args
+    cmd, func, args, options, cmdoptions = _parse(lui, args)
+
+    if options["config"]:
+        raise util.Abort(_("Option --config may not be abbreviated!"))
+    if options["cwd"]:
+        raise util.Abort(_("Option --cwd may not be abbreviated!"))
+    if options["repository"]:
+        raise util.Abort(_(
+            "Option -R has to be separated from other options (i.e. not -qR) "
+            "and --repository may only be abbreviated as --repo!"))
+
+    if options["encoding"]:
+        util._encoding = options["encoding"]
+    if options["encodingmode"]:
+        util._encodingmode = options["encodingmode"]
+    if options["time"]:
+        def get_times():
+            t = os.times()
+            if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
+                t = (t[0], t[1], t[2], t[3], time.clock())
+            return t
+        s = get_times()
+        def print_time():
+            t = get_times()
+            ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
+                (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
+        atexit.register(print_time)
+
+    ui.updateopts(options["verbose"], options["debug"], options["quiet"],
+                 not options["noninteractive"], options["traceback"])
+
+    if options['help']:
+        return commands.help_(ui, cmd, options['version'])
+    elif options['version']:
+        return commands.version_(ui)
+    elif not cmd:
+        return commands.help_(ui, 'shortlist')
+
+    repo = None
+    if cmd not in commands.norepo.split():
+        try:
+            repo = hg.repository(ui, path=path)
+            ui = repo.ui
+            ui.setconfig("bundle", "mainreporoot", repo.root)
+            if not repo.local():
+                raise util.Abort(_("repository '%s' is not local") % path)
+        except hg.RepoError:
+            if cmd not in commands.optionalrepo.split():
+                if not path:
+                    raise hg.RepoError(_("There is no Mercurial repository here"
+                                         " (.hg not found)"))
+                raise
+        d = lambda: func(ui, repo, *args, **cmdoptions)
+    else:
+        d = lambda: func(ui, *args, **cmdoptions)
+
+    # run pre-hook, and abort if it fails
+    ret = hook.hook(lui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
+    if ret:
+        return ret
+    ret = _runcommand(ui, options, cmd, d)
+    # run post-hook, passing command result
+    hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
+              result = ret)
+    return ret
+
+def _runcommand(ui, options, cmd, cmdfunc):
+    def checkargs():
+        try:
+            return cmdfunc()
+        except TypeError, inst:
+            # was this an argument error?
+            tb = traceback.extract_tb(sys.exc_info()[2])
+            if len(tb) != 2: # no
+                raise
+            raise ParseError(cmd, _("invalid arguments"))
+    return profiled(ui, checkargs, options)
+
+def profiled(ui, func, options={}):
+    def profile_fp():
+        outfile = ui.config('profile', 'output', untrusted=True)
+        if outfile:
+            pid = str(os.getpid())
+            return open(outfile.replace('%p', pid), 'w')
+        else:
+            return sys.stderr
+
+    if options.get('profile') or ui.config('profile', 'enable') == 'hotshot':
+        import hotshot, hotshot.stats
+        prof = hotshot.Profile("hg.prof")
+        try:
+            try:
+                return prof.runcall(func)
+            except:
+                try:
+                    ui.warn(_('exception raised - generating '
+                             'profile anyway\n'))
+                except:
+                    pass
+                raise
+        finally:
+            prof.close()
+            stats = hotshot.stats.load("hg.prof")
+            stats.stream = profile_fp()
+            stats.strip_dirs()
+            stats.sort_stats('time', 'calls')
+            stats.print_stats(40)
+    elif options.get('lsprof') or ui.config('profile', 'enable') == 'lsprof':
+        try:
+            from mercurial import lsprof
+        except ImportError:
+            raise util.Abort(_(
+                'lsprof not available - install from '
+                'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
+        p = lsprof.Profiler()
+        p.enable(subcalls=True)
+        try:
+            return func()
+        finally:
+            p.disable()
+            stats = lsprof.Stats(p.getstats())
+            stats.sort()
+            stats.pprint(top=10, file=profile_fp(), climit=5)
+    else:
+        return func()
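
dispatch.py funnels every run through _parse() and _parseconfig() before a
command executes; the --config handling in particular just splits
'section.name=value' strings. A tiny standalone restatement of that splitting
(hypothetical function name, ValueError instead of util.Abort):

    def parseconfig_items(items):
        """Split 'section.name=value' strings as --config is handled above."""
        parsed = []
        for cfg in items:
            name, sep, value = cfg.partition("=")
            section, dot, key = name.partition(".")
            if not (sep and dot and section and key):
                raise ValueError("malformed --config option: %r" % cfg)
            parsed.append((section, key, value))
        return parsed

    assert parseconfig_items(["ui.username=alice", "extensions.mq="]) == [
        ("ui", "username", "alice"),
        ("extensions", "mq", ""),
    ]
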
--- a/mercurial/extensions.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/extensions.py	Wed Feb 06 19:57:52 2008 -0800
@@ -10,8 +10,13 @@
 from i18n import _
 
 _extensions = {}
-commandtable = {}
-setuphooks = []
+_order = []
+
+def extensions():
+    for name in _order:
+        module = _extensions[name]
+        if module:
+            yield name, module
 
 def find(name):
     '''return module with given extension name'''
@@ -24,8 +29,13 @@
         raise KeyError(name)
 
 def load(ui, name, path):
-    if name in _extensions:
+    if name.startswith('hgext.'):
+        shortname = name[6:]
+    else:
+        shortname = name
+    if shortname in _extensions:
         return
+    _extensions[shortname] = None
     if path:
         # the module will be loaded in sys.modules
         # choose an unique name so that it doesn't
@@ -49,25 +59,19 @@
             mod = importh("hgext.%s" % name)
         except ImportError:
             mod = importh(name)
-    _extensions[name] = mod
+    _extensions[shortname] = mod
+    _order.append(shortname)
 
     uisetup = getattr(mod, 'uisetup', None)
     if uisetup:
         uisetup(ui)
-    reposetup = getattr(mod, 'reposetup', None)
-    if reposetup:
-        setuphooks.append(reposetup)
-    cmdtable = getattr(mod, 'cmdtable', {})
-    overrides = [cmd for cmd in cmdtable if cmd in commandtable]
-    if overrides:
-        ui.warn(_("extension '%s' overrides commands: %s\n")
-                % (name, " ".join(overrides)))
-    commandtable.update(cmdtable)
 
 def loadall(ui):
     result = ui.configitems("extensions")
     for i, (name, path) in enumerate(result):
         if path:
+            if path[0] == '!':
+                continue
             path = os.path.expanduser(path)
         try:
             load(ui, name, path)
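
extensions.py now keys its registry by short name (an 'hgext.' prefix is
stripped) and records load order so reposetup hooks and command tables can be
walked later through extensions(). A toy registry with the same bookkeeping,
purely for illustration:

    _extensions = {}
    _order = []

    def register(name, module):
        """Remember a loaded extension under its short name, once, in order."""
        shortname = name[6:] if name.startswith("hgext.") else name
        if shortname in _extensions:
            return
        _extensions[shortname] = module
        _order.append(shortname)

    def extensions():
        """Yield (shortname, module) pairs in load order."""
        for name in _order:
            module = _extensions[name]
            if module is not None:
                yield name, module

    register("hgext.mq", object())
    register("mq", object())  # ignored: same short name already registered
    assert [name for name, mod in extensions()] == ["mq"]
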
--- a/mercurial/fancyopts.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/fancyopts.py	Wed Feb 06 19:57:52 2008 -0800
@@ -1,32 +1,73 @@
 import getopt
 
 def fancyopts(args, options, state):
-    long = []
-    short = ''
-    map = {}
-    dt = {}
+    """
+    read args, parse options, and store options in state
+
+    each option is a tuple of:
+
+      short option or ''
+      long option
+      default value
+      description
+
+    option types include:
+
+      boolean or none - option sets variable in state to true
+      string - parameter string is stored in state
+      list - parameter string is added to a list
+      integer - parameter string is stored as an int
+      function - call function with parameter
+
+    non-option args are returned
+    """
+    namelist = []
+    shortlist = ''
+    argmap = {}
+    defmap = {}
+
+    for short, name, default, comment in options:
+        # convert opts to getopt format
+        oname = name
+        name = name.replace('-', '_')
+
+        argmap['-' + short] = argmap['--' + oname] = name
+        defmap[name] = default
 
-    for s, l, d, c in options:
-        pl = l.replace('-', '_')
-        map['-'+s] = map['--'+l] = pl
-        state[pl] = d
-        dt[pl] = type(d)
-        if (d is not None and d is not True and d is not False and
-            not callable(d)):
-            if s: s += ':'
-            if l: l += '='
-        if s: short = short + s
-        if l: long.append(l)
+        # copy defaults to state
+        if isinstance(default, list):
+            state[name] = default[:]
+        elif callable(default):
+            state[name] = None
+        else:
+            state[name] = default
+
+        # does it take a parameter?
+        if not (default is None or default is True or default is False):
+            if short: short += ':'
+            if oname: oname += '='
+        if short:
+            shortlist += short
+        if name:
+            namelist.append(oname)
 
-    opts, args = getopt.getopt(args, short, long)
+    # parse arguments
+    opts, args = getopt.getopt(args, shortlist, namelist)
 
-    for opt, arg in opts:
-        if dt[map[opt]] is type(fancyopts): state[map[opt]](state, map[opt], arg)
-        elif dt[map[opt]] is type(1): state[map[opt]] = int(arg)
-        elif dt[map[opt]] is type(''): state[map[opt]] = arg
-        elif dt[map[opt]] is type([]): state[map[opt]].append(arg)
-        elif dt[map[opt]] is type(None): state[map[opt]] = True
-        elif dt[map[opt]] is type(False): state[map[opt]] = True
+    # transfer result to state
+    for opt, val in opts:
+        name = argmap[opt]
+        t = type(defmap[name])
+        if t is type(fancyopts):
+            state[name] = defmap[name](val)
+        elif t is type(1):
+            state[name] = int(val)
+        elif t is type(''):
+            state[name] = val
+        elif t is type([]):
+            state[name].append(val)
+        elif t is type(None) or t is type(False):
+            state[name] = True
 
+    # return unparsed args
     return args
-
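
The rewritten fancyopts() drives its behaviour entirely off the type of each
option's default value, as the new docstring spells out. A hypothetical usage
sketch, assuming the version above is importable as mercurial.fancyopts:

    from mercurial.fancyopts import fancyopts  # assumes Mercurial on sys.path

    opts = [
        ("v", "verbose", None, "enable additional output"),  # boolean flag
        ("r", "rev",     [],   "revisions to show"),         # repeatable list
        ("l", "limit",   0,    "limit number of changes"),   # int parameter
    ]
    state = {}
    rest = fancyopts(["-v", "--rev", "tip", "-l", "3", "somefile"], opts, state)

    # with the version shown above:
    #   state == {"verbose": True, "rev": ["tip"], "limit": 3}
    #   rest  == ["somefile"]
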
--- a/mercurial/filelog.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/filelog.py	Wed Feb 06 19:57:52 2008 -0800
@@ -58,7 +58,7 @@
         if self.parents(node)[0] != nullid:
             return False
         m = self._readmeta(node)
-        if m and m.has_key("copy"):
+        if m and "copy" in m:
             return (m["copy"], bin(m["copyrev"]))
         return False
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/filemerge.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,206 @@
+# filemerge.py - file-level merge handling for Mercurial
+#
+# Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from node import *
+from i18n import _
+import util, os, tempfile, context, simplemerge, re
+
+def _toolstr(ui, tool, part, default=""):
+    return ui.config("merge-tools", tool + "." + part, default)
+
+def _toolbool(ui, tool, part, default=False):
+    return ui.configbool("merge-tools", tool + "." + part, default)
+
+def _findtool(ui, tool):
+    k = _toolstr(ui, tool, "regkey")
+    if k:
+        p = util.lookup_reg(k, _toolstr(ui, tool, "regname"))
+        if p:
+            p = util.find_exe(p + _toolstr(ui, tool, "regappend"))
+            if p:
+                return p
+    return util.find_exe(_toolstr(ui, tool, "executable", tool))
+
+def _picktool(repo, ui, path, binary, symlink):
+    def check(tool, pat, symlink, binary):
+        tmsg = tool
+        if pat:
+            tmsg += " specified for " + pat
+        if pat and not _findtool(ui, tool): # skip search if not matching
+            ui.warn(_("couldn't find merge tool %s\n") % tmsg)
+        elif symlink and not _toolbool(ui, tool, "symlink"):
+            ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
+        elif binary and not _toolbool(ui, tool, "binary"):
+            ui.warn(_("tool %s can't handle binary\n") % tmsg)
+        elif not util.gui() and _toolbool(ui, tool, "gui"):
+            ui.warn(_("tool %s requires a GUI\n") % tmsg)
+        else:
+            return True
+        return False
+
+    # HGMERGE takes precedence
+    hgmerge = os.environ.get("HGMERGE")
+    if hgmerge:
+        return (hgmerge, hgmerge)
+
+    # then patterns
+    for pat, tool in ui.configitems("merge-patterns"):
+        mf = util.matcher(repo.root, "", [pat], [], [])[1]
+        if mf(path) and check(tool, pat, symlink, False):
+            toolpath = _findtool(ui, tool)
+            return (tool, '"' + toolpath + '"')
+
+    # then merge tools
+    tools = {}
+    for k,v in ui.configitems("merge-tools"):
+        t = k.split('.')[0]
+        if t not in tools:
+            tools[t] = int(_toolstr(ui, t, "priority", "0"))
+    tools = [(-p,t) for t,p in tools.items()]
+    tools.sort()
+    if ui.config("ui", "merge"):
+        tools.insert(0, (None, ui.config("ui", "merge"))) # highest priority
+    tools.append((None, "hgmerge")) # the old default, if found
+    for p,t in tools:
+        toolpath = _findtool(ui, t)
+        if toolpath and check(t, None, symlink, binary):
+            return (t, '"' + toolpath + '"')
+    # internal merge as last resort
+    return (not (symlink or binary) and "internal:merge" or None, None)
+
+def _eoltype(data):
+    "Guess the EOL type of a file"
+    if '\0' in data: # binary
+        return None
+    if '\r\n' in data: # Windows
+        return '\r\n'
+    if '\r' in data: # Old Mac
+        return '\r'
+    if '\n' in data: # UNIX
+        return '\n'
+    return None # unknown
+
+def _matcheol(file, origfile):
+    "Convert EOL markers in a file to match origfile"
+    tostyle = _eoltype(open(origfile, "rb").read())
+    if tostyle:
+        data = open(file, "rb").read()
+        style = _eoltype(data)
+        if style:
+            newdata = data.replace(style, tostyle)
+            if newdata != data:
+                open(file, "wb").write(newdata)
+
+def filemerge(repo, fw, fd, fo, wctx, mctx):
+    """perform a 3-way merge in the working directory
+
+    fw = original filename in the working directory
+    fd = destination filename in the working directory
+    fo = filename in other parent
+    wctx, mctx = working and merge changecontexts
+    """
+
+    def temp(prefix, ctx):
+        pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
+        (fd, name) = tempfile.mkstemp(prefix=pre)
+        data = repo.wwritedata(ctx.path(), ctx.data())
+        f = os.fdopen(fd, "wb")
+        f.write(data)
+        f.close()
+        return name
+
+    def isbin(ctx):
+        try:
+            return util.binary(ctx.data())
+        except IOError:
+            return False
+
+    fco = mctx.filectx(fo)
+    if not fco.cmp(wctx.filectx(fd).data()): # files identical?
+        return None
+
+    ui = repo.ui
+    fcm = wctx.filectx(fw)
+    fca = fcm.ancestor(fco) or repo.filectx(fw, fileid=nullrev)
+    binary = isbin(fcm) or isbin(fco) or isbin(fca)
+    symlink = fcm.islink() or fco.islink()
+    tool, toolpath = _picktool(repo, ui, fw, binary, symlink)
+    ui.debug(_("picked tool '%s' for %s (binary %s symlink %s)\n") %
+               (tool, fw, binary, symlink))
+
+    if not tool:
+        tool = "internal:local"
+        if ui.prompt(_(" no tool found to merge %s\n"
+                       "keep (l)ocal or take (o)ther?") % fw,
+                     _("[lo]"), _("l")) != _("l"):
+            tool = "internal:other"
+    if tool == "internal:local":
+        return 0
+    if tool == "internal:other":
+        repo.wwrite(fd, fco.data(), fco.fileflags())
+        return 0
+    if tool == "internal:fail":
+        return 1
+
+    # do the actual merge
+    a = repo.wjoin(fd)
+    b = temp("base", fca)
+    c = temp("other", fco)
+    out = ""
+    back = a + ".orig"
+    util.copyfile(a, back)
+
+    if fw != fo:
+        repo.ui.status(_("merging %s and %s\n") % (fw, fo))
+    else:
+        repo.ui.status(_("merging %s\n") % fw)
+    repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
+
+    # do we attempt to simplemerge first?
+    if _toolbool(ui, tool, "premerge", not (binary or symlink)):
+        r = simplemerge.simplemerge(a, b, c, quiet=True)
+        if not r:
+            ui.debug(_(" premerge successful\n"))
+            os.unlink(back)
+            os.unlink(b)
+            os.unlink(c)
+            return 0
+        util.copyfile(back, a) # restore from backup and try again
+
+    env = dict(HG_FILE=fd,
+               HG_MY_NODE=str(wctx.parents()[0]),
+               HG_OTHER_NODE=str(mctx),
+               HG_MY_ISLINK=fcm.islink(),
+               HG_OTHER_ISLINK=fco.islink(),
+               HG_BASE_ISLINK=fca.islink())
+
+    if tool == "internal:merge":
+        r = simplemerge.simplemerge(a, b, c, label=['local', 'other'])
+    else:
+        args = _toolstr(ui, tool, "args", '$local $base $other')
+        if "$output" in args:
+            out, a = a, back # read input from backup, write to original
+        replace = dict(local=a, base=b, other=c, output=out)
+        args = re.sub("\$(local|base|other|output)",
+                      lambda x: '"%s"' % replace[x.group()[1:]], args)
+        r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env)
+
+    if not r and _toolbool(ui, tool, "checkconflicts"):
+        if re.match("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcm.data()):
+            r = 1
+
+    if _toolbool(ui, tool, "fixeol"):
+        _matcheol(repo.wjoin(fd), back)
+
+    if r:
+        repo.ui.warn(_("merging %s failed!\n") % fd)
+    else:
+        os.unlink(back)
+
+    os.unlink(b)
+    os.unlink(c)
+    return r
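
filemerge.py's fixeol support (_eoltype and _matcheol above) boils down to
guessing the dominant line ending of the pre-merge file and rewriting the
merge result to match. A self-contained restatement with hypothetical names,
operating on bytes:

    def eoltype(data):
        """Guess the EOL style of a blob; None means binary or unknown."""
        if b"\0" in data:
            return None
        if b"\r\n" in data:
            return b"\r\n"
        if b"\r" in data:
            return b"\r"
        if b"\n" in data:
            return b"\n"
        return None

    def matcheol(path, origpath):
        """Rewrite line endings in path to match those of origpath."""
        with open(origpath, "rb") as f:
            tostyle = eoltype(f.read())
        if not tostyle:
            return
        with open(path, "rb") as f:
            data = f.read()
        style = eoltype(data)
        if style and style != tostyle:
            with open(path, "wb") as f:
                f.write(data.replace(style, tostyle))
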
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/hbisect.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,105 @@
+# changelog bisection for mercurial
+#
+# Copyright 2007 Matt Mackall
+# Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
+# Inspired by git bisect, extension skeleton taken from mq.py.
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from i18n import _
+import hg, util
+
+def bisect(changelog, state):
+    clparents = changelog.parentrevs
+    skip = dict.fromkeys([changelog.rev(n) for n in state['skip']])
+
+    def buildancestors(bad, good):
+        # only the earliest bad revision matters
+        badrev = min([changelog.rev(n) for n in bad])
+        goodrevs = [changelog.rev(n) for n in good]
+        # build ancestors array
+        ancestors = [[]] * (changelog.count() + 1) # an extra for [-1]
+
+        # clear good revs from array
+        for node in goodrevs:
+            ancestors[node] = None
+        for rev in xrange(changelog.count(), -1, -1):
+            if ancestors[rev] is None:
+                for prev in clparents(rev):
+                    ancestors[prev] = None
+
+        if ancestors[badrev] is None:
+            return badrev, None
+        return badrev, ancestors
+
+    good = 0
+    badrev, ancestors = buildancestors(state['bad'], state['good'])
+    if not ancestors: # looking for bad to good transition?
+        good = 1
+        badrev, ancestors = buildancestors(state['good'], state['bad'])
+    bad = changelog.node(badrev)
+    if not ancestors: # now we're confused
+        raise util.Abort(_("Inconsistent state, %s:%s is good and bad")
+                         % (badrev, hg.short(bad)))
+
+    # build children dict
+    children = {}
+    visit = [badrev]
+    candidates = []
+    while visit:
+        rev = visit.pop(0)
+        if ancestors[rev] == []:
+            candidates.append(rev)
+            for prev in clparents(rev):
+                if prev != -1:
+                    if prev in children:
+                        children[prev].append(rev)
+                    else:
+                        children[prev] = [rev]
+                        visit.append(prev)
+
+    candidates.sort()
+    # have we narrowed it down to one entry?
+    tot = len(candidates)
+    if tot == 1:
+        return (bad, 0, good)
+    perfect = tot / 2
+
+    # find the best node to test
+    best_rev = None
+    best_len = -1
+    poison = {}
+    for rev in candidates:
+        if rev in poison:
+            for c in children.get(rev, []):
+                poison[c] = True # poison children
+            continue
+
+        a = ancestors[rev] or [rev]
+        ancestors[rev] = None
+
+        x = len(a) # number of ancestors
+        y = tot - x # number of non-ancestors
+        value = min(x, y) # how good is this test?
+        if value > best_len and rev not in skip:
+            best_len = value
+            best_rev = rev
+            if value == perfect: # found a perfect candidate? quit early
+                break
+
+        if y < perfect: # all downhill from here?
+            for c in children.get(rev, []):
+                poison[c] = True # poison children
+            continue
+
+        for c in children.get(rev, []):
+            if ancestors[c]:
+                ancestors[c] = dict.fromkeys(ancestors[c] + a).keys()
+            else:
+                ancestors[c] = a + [c]
+
+    assert best_rev is not None
+    best_node = changelog.node(best_rev)
+
+    return (best_node, tot, good)
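
The candidate-selection loop in hbisect.py scores each untested revision by
min(ancestors, non-ancestors): whichever way the test turns out, at least
that many candidates are eliminated, so the best test maximizes that
guarantee. The scoring rule in isolation:

    def best_split(ancestor_counts, total):
        """Pick the candidate whose test guarantees the most progress,
        scoring each rev by min(x, total - x) as the loop above does."""
        best_rev, best_len = None, -1
        for rev, x in sorted(ancestor_counts.items()):
            value = min(x, total - x)
            if value > best_len:
                best_rev, best_len = rev, value
        return best_rev

    # with 10 candidates, a rev with 5 ancestors splits the search evenly
    assert best_split({1: 2, 4: 5, 7: 8}, 10) == 4
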
--- a/mercurial/help.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/help.py	Wed Feb 06 19:57:52 2008 -0800
@@ -38,13 +38,12 @@
     'environment|env|Environment Variables':
     r'''
 HG::
-    Path to the 'hg' executable, automatically passed when running hooks
-    or external tools. Falls back to 'hg' if unset and the value can't be
-    autodetected, e.g. when Mercurial is run as a Python module.
+    Path to the 'hg' executable, automatically passed when running hooks,
+    extensions or external tools. If unset or empty, an executable named
+    'hg' (with com/exe/bat/cmd extension on Windows) is searched for.
 
 HGEDITOR::
-    This is the name of the editor to use when committing. Defaults to the
-    value of EDITOR.
+    This is the name of the editor to use when committing. See EDITOR.
 
     (deprecated, use .hgrc)
 
@@ -67,9 +66,6 @@
     will be executed with three arguments: local file, remote file,
     ancestor file.
 
-    The default program is "hgmerge", which is a shell script provided
-    by Mercurial with some sensible defaults.
-
     (deprecated, use .hgrc)
 
 HGRCPATH::
@@ -94,9 +90,16 @@
     If neither HGUSER nor EMAIL is set, LOGNAME will be used (with
     '@hostname' appended) as the author value for a commit.
 
+VISUAL::
+    This is the name of the editor to use when committing. See EDITOR.
+
 EDITOR::
-    This is the name of the editor used in the hgmerge script. It will be
-    used for commit messages if HGEDITOR isn't set. Defaults to 'vi'.
+    Sometimes Mercurial needs to open a text file in an editor
+    for a user to modify, for example when writing commit messages.
+    The editor it uses is determined by looking at the environment
+    variables HGEDITOR, VISUAL and EDITOR, in that order. The first
+    non-empty one is chosen. If all of them are empty, the editor
+    defaults to 'vi'.
 
 PYTHONPATH::
     This is used by Python to find imported modules and may need to be set
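
The updated HGEDITOR/VISUAL/EDITOR help text describes a simple precedence
chain. As a plain illustration of that documented order (not Mercurial's
actual lookup code):

    import os

    def find_editor(environ=None):
        """Return the first non-empty of HGEDITOR, VISUAL, EDITOR, else 'vi'."""
        environ = os.environ if environ is None else environ
        for var in ("HGEDITOR", "VISUAL", "EDITOR"):
            value = environ.get(var)
            if value:
                return value
        return "vi"

    assert find_editor({"VISUAL": "", "EDITOR": "nano"}) == "nano"
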
--- a/mercurial/hg.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/hg.py	Wed Feb 06 19:57:52 2008 -0800
@@ -10,7 +10,7 @@
 from repo import *
 from i18n import _
 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
-import errno, lock, os, shutil, util, cmdutil, extensions
+import errno, lock, os, shutil, util, extensions
 import merge as _merge
 import verify as _verify
 
@@ -18,6 +18,15 @@
     return (os.path.isfile(util.drop_scheme('file', path)) and
             bundlerepo or localrepo)
 
+def parseurl(url, revs):
+    '''parse url#branch, returning url, branch + revs'''
+
+    if '#' not in url:
+        return url, (revs or None), None
+
+    url, rev = url.split('#', 1)
+    return url, revs + [rev], rev
+
 schemes = {
     'bundle': bundlerepo,
     'file': _local,
@@ -52,8 +61,10 @@
     """return a repository object for the specified path"""
     repo = _lookup(path).instance(ui, path, create)
     ui = getattr(repo, "ui", ui)
-    for hook in extensions.setuphooks:
-        hook(ui, repo)
+    for name, module in extensions.extensions():
+        hook = getattr(module, 'reposetup', None)
+        if hook:
+            hook(ui, repo)
     return repo
 
 def defaultdest(source):
@@ -95,7 +106,7 @@
     """
 
     origsource = source
-    source, rev = cmdutil.parseurl(ui.expandpath(source), rev)
+    source, rev, checkout = parseurl(ui.expandpath(source), rev)
 
     if isinstance(source, str):
         src_repo = repository(ui, source)
@@ -138,7 +149,7 @@
         abspath = origsource
         copy = False
         if src_repo.local() and islocal(dest):
-            abspath = os.path.abspath(origsource)
+            abspath = os.path.abspath(util.drop_scheme('file', origsource))
             copy = not pull and not rev
 
         if copy:
@@ -153,18 +164,28 @@
 
         if copy:
             def force_copy(src, dst):
-                try:
-                    util.copyfiles(src, dst)
-                except OSError, inst:
-                    if inst.errno != errno.ENOENT:
-                        raise
+                if not os.path.exists(src):
+                    # Tolerate empty source repository and optional files
+                    return
+                util.copyfiles(src, dst)
 
             src_store = os.path.realpath(src_repo.spath)
             if not os.path.exists(dest):
                 os.mkdir(dest)
-            dest_path = os.path.realpath(os.path.join(dest, ".hg"))
-            os.mkdir(dest_path)
+            try:
+                dest_path = os.path.realpath(os.path.join(dest, ".hg"))
+                os.mkdir(dest_path)
+            except OSError, inst:
+                if inst.errno == errno.EEXIST:
+                    dir_cleanup.close()
+                    raise util.Abort(_("destination '%s' already exists")
+                                     % dest)
+                raise
             if src_repo.spath != src_repo.path:
+                # XXX racy
+                dummy_changelog = os.path.join(dest_path, "00changelog.i")
+                # copy the dummy changelog
+                force_copy(src_repo.join("00changelog.i"), dummy_changelog)
                 dest_store = os.path.join(dest_path, "store")
                 os.mkdir(dest_store)
             else:
@@ -188,7 +209,14 @@
             dest_repo = repository(ui, dest)
 
         else:
-            dest_repo = repository(ui, dest, create=True)
+            try:
+                dest_repo = repository(ui, dest, create=True)
+            except OSError, inst:
+                if inst.errno == errno.EEXIST:
+                    dir_cleanup.close()
+                    raise util.Abort(_("destination '%s' already exists")
+                                     % dest)
+                raise
 
             revs = None
             if rev:
@@ -205,6 +233,9 @@
             else:
                 raise util.Abort(_("clone from remote to remote not supported"))
 
+        if dir_cleanup:
+            dir_cleanup.close()
+
         if dest_repo.local():
             fp = dest_repo.opener("hgrc", "w", text=True)
             fp.write("[paths]\n")
@@ -212,13 +243,12 @@
             fp.close()
 
             if update:
-                try:
-                    checkout = dest_repo.lookup("default")
-                except:
-                    checkout = dest_repo.changelog.tip()
+                if not checkout:
+                    try:
+                        checkout = dest_repo.lookup("default")
+                    except:
+                        checkout = dest_repo.changelog.tip()
                 _update(dest_repo, checkout)
-        if dir_cleanup:
-            dir_cleanup.close()
 
         return src_repo, dest_repo
     finally:
@@ -249,13 +279,13 @@
         # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
         repo.ui.status(_("  hg update %s\n  hg update %s\n")
                        % (pl[0].rev(), repo.changectx(node).rev()))
-    return stats[3]
+    return stats[3] > 0
 
 def clean(repo, node, show_stats=True):
     """forcibly switch the working directory to node, clobbering changes"""
     stats = _merge.update(repo, node, False, True, None)
     if show_stats: _showstats(repo, stats)
-    return stats[3]
+    return stats[3] > 0
 
 def merge(repo, node, force=None, remind=True):
     """branch merge with node, resolving changes"""
@@ -270,11 +300,11 @@
                        % (pl[0].rev(), pl[1].rev()))
     elif remind:
         repo.ui.status(_("(branch merge, don't forget to commit)\n"))
-    return stats[3]
+    return stats[3] > 0
 
 def revert(repo, node, choose):
     """revert changes to revision in node without updating dirstate"""
-    return _merge.update(repo, node, False, True, choose)[3]
+    return _merge.update(repo, node, False, True, choose)[3] > 0
 
 def verify(repo):
     """verify the consistency of a repository"""
--- a/mercurial/hgweb/common.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/hgweb/common.py	Wed Feb 06 19:57:52 2008 -0800
@@ -6,7 +6,29 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-import os, mimetypes
+import errno, mimetypes, os
+
+HTTP_OK = 200
+HTTP_BAD_REQUEST = 400
+HTTP_NOT_FOUND = 404
+HTTP_SERVER_ERROR = 500
+
+class ErrorResponse(Exception):
+    def __init__(self, code, message=None):
+        Exception.__init__(self)
+        self.code = code
+        if message:
+            self.message = message
+        else:
+            self.message = _statusmessage(code)
+
+def _statusmessage(code):
+    from BaseHTTPServer import BaseHTTPRequestHandler
+    responses = BaseHTTPRequestHandler.responses
+    return responses.get(code, ('Error', 'Unknown error'))[0]
+
+def statusmessage(code):
+    return '%d %s' % (code, _statusmessage(code))
 
 def get_mtime(repo_path):
     store_path = os.path.join(repo_path, ".hg")
@@ -19,11 +41,11 @@
         return os.stat(store_path).st_mtime
 
 def staticfile(directory, fname, req):
-    """return a file inside directory with guessed content-type header
+    """return a file inside directory with guessed Content-Type header
 
     fname always uses '/' as directory separator and isn't allowed to
     contain unusual path components.
-    Content-type is guessed using the mimetypes module.
+    Content-Type is guessed using the mimetypes module.
     Return an empty string if fname is illegal or file not found.
 
     """
@@ -37,12 +59,15 @@
     try:
         os.stat(path)
         ct = mimetypes.guess_type(path)[0] or "text/plain"
-        req.header([('Content-type', ct),
-                    ('Content-length', str(os.path.getsize(path)))])
+        req.respond(HTTP_OK, ct, length = os.path.getsize(path))
         return file(path, 'rb').read()
-    except (TypeError, OSError):
-        # illegal fname or unreadable file
-        return ""
+    except TypeError:
+        raise ErrorResponse(HTTP_SERVER_ERROR, 'illegal file name')
+    except OSError, err:
+        if err.errno == errno.ENOENT:
+            raise ErrorResponse(HTTP_NOT_FOUND)
+        else:
+            raise ErrorResponse(HTTP_SERVER_ERROR, err.strerror)
 
 def style_map(templatepath, style):
     """Return path to mapfile for a given style.
@@ -76,3 +101,12 @@
             parity = 1 - parity
             count = 0
 
+def get_contact(config):
+    """Return repo contact information or empty string.
+
+    web.contact is the primary source, but if that is not set, try
+    ui.username or $EMAIL as a fallback to display something useful.
+    """
+    return (config("web", "contact") or
+            config("ui", "username") or
+            os.environ.get("EMAIL") or "")
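
A self-contained sketch, mirroring rather than importing the helpers added above, of the error-handling pattern common.py now provides: handlers raise ErrorResponse with an HTTP code, and staticfile() maps a missing file to 404 instead of silently returning an empty string. read_file() here is a hypothetical stand-in for staticfile():

import errno, os

HTTP_NOT_FOUND = 404
HTTP_SERVER_ERROR = 500

class ErrorResponse(Exception):
    def __init__(self, code, message=None):
        Exception.__init__(self)
        self.code = code
        self.message = message or ('HTTP error %d' % code)

def read_file(path):
    # missing file -> 404, any other OS-level failure -> 500
    try:
        os.stat(path)
        return file(path, 'rb').read()
    except OSError, err:
        if err.errno == errno.ENOENT:
            raise ErrorResponse(HTTP_NOT_FOUND, 'not found: %s' % path)
        raise ErrorResponse(HTTP_SERVER_ERROR, err.strerror)

try:
    read_file('/no/such/file')
except ErrorResponse, err:
    print '%d %s' % (err.code, err.message)   # 404 not found: /no/such/file
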
--- a/mercurial/hgweb/hgweb_mod.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/hgweb/hgweb_mod.py	Wed Feb 06 19:57:52 2008 -0800
@@ -6,13 +6,29 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-import os, mimetypes, re, zlib, mimetools, cStringIO, sys
-import tempfile, urllib, bz2
+import os, mimetypes, re
 from mercurial.node import *
-from mercurial.i18n import gettext as _
-from mercurial import mdiff, ui, hg, util, archival, streamclone, patch
-from mercurial import revlog, templater
-from common import get_mtime, staticfile, style_map, paritygen
+from mercurial import mdiff, ui, hg, util, archival, patch, hook
+from mercurial import revlog, templater, templatefilters
+from common import ErrorResponse, get_mtime, style_map, paritygen, get_contact
+from common import HTTP_OK, HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
+from request import wsgirequest
+import webcommands, protocol
+
+shortcuts = {
+    'cl': [('cmd', ['changelog']), ('rev', None)],
+    'sl': [('cmd', ['shortlog']), ('rev', None)],
+    'cs': [('cmd', ['changeset']), ('node', None)],
+    'f': [('cmd', ['file']), ('filenode', None)],
+    'fl': [('cmd', ['filelog']), ('filenode', None)],
+    'fd': [('cmd', ['filediff']), ('node', None)],
+    'fa': [('cmd', ['annotate']), ('filenode', None)],
+    'mf': [('cmd', ['manifest']), ('manifest', None)],
+    'ca': [('cmd', ['archive']), ('node', None)],
+    'tags': [('cmd', ['tags'])],
+    'tip': [('cmd', ['changeset']), ('node', ['tip'])],
+    'static': [('cmd', ['static']), ('file', None)]
+}
 
 def _up(p):
     if p[0] != "/":
@@ -63,12 +79,15 @@
     return nav
 
 class hgweb(object):
-    def __init__(self, repo, name=None):
+    def __init__(self, repo, name=None, parentui=None):
         if isinstance(repo, str):
-            self.repo = hg.repository(ui.ui(report_untrusted=False), repo)
+            parentui = (parentui or
+                        ui.ui(report_untrusted=False, interactive=False))
+            self.repo = hg.repository(parentui, repo)
         else:
             self.repo = repo
 
+        hook.redirect(True)
         self.mtime = -1
         self.reponame = name
         self.archives = 'zip', 'gz', 'bz2'
@@ -105,17 +124,201 @@
             self.allowpull = self.configbool("web", "allowpull", True)
             self.encoding = self.config("web", "encoding", util._encoding)
 
+    def run(self):
+        if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
+            raise RuntimeError("This function is only intended to be called while running as a CGI script.")
+        import mercurial.hgweb.wsgicgi as wsgicgi
+        wsgicgi.launch(self)
+
+    def __call__(self, env, respond):
+        req = wsgirequest(env, respond)
+        self.run_wsgi(req)
+        return req
+
+    def run_wsgi(self, req):
+
+        self.refresh()
+
+        # expand form shortcuts
+
+        for k in shortcuts.iterkeys():
+            if k in req.form:
+                for name, value in shortcuts[k]:
+                    if value is None:
+                        value = req.form[k]
+                    req.form[name] = value
+                del req.form[k]
+
+        # work with CGI variables to create a coherent structure
+        # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
+
+        req.url = req.env['SCRIPT_NAME']
+        if not req.url.endswith('/'):
+            req.url += '/'
+        if 'REPO_NAME' in req.env:
+            req.url += req.env['REPO_NAME'] + '/'
+
+        if req.env.get('PATH_INFO'):
+            parts = req.env.get('PATH_INFO').strip('/').split('/')
+            repo_parts = req.env.get('REPO_NAME', '').split('/')
+            if parts[:len(repo_parts)] == repo_parts:
+                parts = parts[len(repo_parts):]
+            query = '/'.join(parts)
+        else:
+            query = req.env['QUERY_STRING'].split('&', 1)[0]
+            query = query.split(';', 1)[0]
+
+        # translate user-visible url structure to internal structure
+
+        args = query.split('/', 2)
+        if 'cmd' not in req.form and args and args[0]:
+
+            cmd = args.pop(0)
+            style = cmd.rfind('-')
+            if style != -1:
+                req.form['style'] = [cmd[:style]]
+                cmd = cmd[style+1:]
+
+            # avoid accepting e.g. style parameter as command
+            if hasattr(webcommands, cmd) or hasattr(protocol, cmd):
+                req.form['cmd'] = [cmd]
+
+            if args and args[0]:
+                node = args.pop(0)
+                req.form['node'] = [node]
+            if args:
+                req.form['file'] = args
+
+            if cmd == 'static':
+                req.form['file'] = req.form['node']
+            elif cmd == 'archive':
+                fn = req.form['node'][0]
+                for type_, spec in self.archive_specs.iteritems():
+                    ext = spec[2]
+                    if fn.endswith(ext):
+                        req.form['node'] = [fn[:-len(ext)]]
+                        req.form['type'] = [type_]
+
+        # actually process the request
+
+        try:
+
+            cmd = req.form.get('cmd', [''])[0]
+            if cmd in protocol.__all__:
+                method = getattr(protocol, cmd)
+                method(self, req)
+            else:
+                tmpl = self.templater(req)
+                ctype = tmpl('mimetype', encoding=self.encoding)
+                ctype = templater.stringify(ctype)
+
+                if cmd == '':
+                    req.form['cmd'] = [tmpl.cache['default']]
+                    cmd = req.form['cmd'][0]
+
+                if cmd not in webcommands.__all__:
+                    msg = 'No such method: %s' % cmd
+                    raise ErrorResponse(HTTP_BAD_REQUEST, msg)
+                elif cmd == 'file' and 'raw' in req.form.get('style', []):
+                    self.ctype = ctype
+                    content = webcommands.rawfile(self, req, tmpl)
+                else:
+                    content = getattr(webcommands, cmd)(self, req, tmpl)
+                    req.respond(HTTP_OK, ctype)
+
+                req.write(content)
+                del tmpl
+
+        except revlog.LookupError, err:
+            req.respond(HTTP_NOT_FOUND, ctype)
+            req.write(tmpl('error', error='revision not found: %s' % err.name))
+        except (hg.RepoError, revlog.RevlogError), inst:
+            req.respond(HTTP_SERVER_ERROR, ctype)
+            req.write(tmpl('error', error=str(inst)))
+        except ErrorResponse, inst:
+            req.respond(inst.code, ctype)
+            req.write(tmpl('error', error=inst.message))
+
+    def templater(self, req):
+
+        # determine scheme, port and server name
+        # this is needed to create absolute urls
+
+        proto = req.env.get('wsgi.url_scheme')
+        if proto == 'https':
+            proto = 'https'
+            default_port = "443"
+        else:
+            proto = 'http'
+            default_port = "80"
+
+        port = req.env["SERVER_PORT"]
+        port = port != default_port and (":" + port) or ""
+        urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
+        staticurl = self.config("web", "staticurl") or req.url + 'static/'
+        if not staticurl.endswith('/'):
+            staticurl += '/'
+
+        # some functions for the templater
+
+        def header(**map):
+            yield tmpl('header', encoding=self.encoding, **map)
+
+        def footer(**map):
+            yield tmpl("footer", **map)
+
+        def motd(**map):
+            yield self.config("web", "motd", "")
+
+        def sessionvars(**map):
+            fields = []
+            if 'style' in req.form:
+                style = req.form['style'][0]
+                if style != self.config('web', 'style', ''):
+                    fields.append(('style', style))
+
+            separator = req.url[-1] == '?' and ';' or '?'
+            for name, value in fields:
+                yield dict(name=name, value=value, separator=separator)
+                separator = ';'
+
+        # figure out which style to use
+
+        style = self.config("web", "style", "")
+        if 'style' in req.form:
+            style = req.form['style'][0]
+        mapfile = style_map(self.templatepath, style)
+
+        if not self.reponame:
+            self.reponame = (self.config("web", "name")
+                             or req.env.get('REPO_NAME')
+                             or req.url.strip('/') or self.repo.root)
+
+        # create the templater
+
+        tmpl = templater.templater(mapfile, templatefilters.filters,
+                                   defaults={"url": req.url,
+                                             "staticurl": staticurl,
+                                             "urlbase": urlbase,
+                                             "repo": self.reponame,
+                                             "header": header,
+                                             "footer": footer,
+                                             "motd": motd,
+                                             "sessionvars": sessionvars
+                                            })
+        return tmpl
+
     def archivelist(self, nodeid):
         allowed = self.configlist("web", "allow_archive")
         for i, spec in self.archive_specs.iteritems():
             if i in allowed or self.configbool("web", "allow" + i):
                 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
 
-    def listfilediffs(self, files, changeset):
+    def listfilediffs(self, tmpl, files, changeset):
         for f in files[:self.maxfiles]:
-            yield self.t("filedifflink", node=hex(changeset), file=f)
+            yield tmpl("filedifflink", node=hex(changeset), file=f)
         if len(files) > self.maxfiles:
-            yield self.t("fileellipses")
+            yield tmpl("fileellipses")
 
     def siblings(self, siblings=[], hiderev=None, **args):
         siblings = [s for s in siblings if s.node() != nullid]
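
The hunk above folds the old expand_form()/rewrite_request() logic directly into run_wsgi(). A simplified, self-contained sketch of the URL-to-form translation it performs (command validation and the static/archive special cases are left out; parse_path is a hypothetical stand-in):

def parse_path(path_info):
    form = {}
    args = path_info.strip('/').split('/', 2)
    if args and args[0]:
        cmd = args.pop(0)
        # an optional "style-" prefix on the first component selects a style
        dash = cmd.rfind('-')
        if dash != -1:
            form['style'] = [cmd[:dash]]
            cmd = cmd[dash + 1:]
        form['cmd'] = [cmd]
        if args and args[0]:
            form['node'] = [args.pop(0)]
        if args:
            form['file'] = args
    return form

f = parse_path('/raw-file/tip/README')
print f['style'], f['cmd'], f['node'], f['file']
# ['raw'] ['file'] ['tip'] ['README']
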
@@ -140,15 +343,18 @@
     def nodebranchdict(self, ctx):
         branches = []
         branch = ctx.branch()
-        if self.repo.branchtags()[branch] == ctx.node():
+        # If this is an empty repo, ctx.node() == nullid,
+        # ctx.branch() == 'default', but branchtags() is
+        # an empty dict. Using dict.get avoids a traceback.
+        if self.repo.branchtags().get(branch) == ctx.node():
             branches.append({"name": branch})
         return branches
 
-    def showtag(self, t1, node=nullid, **args):
+    def showtag(self, tmpl, t1, node=nullid, **args):
         for t in self.repo.nodetags(node):
-            yield self.t(t1, tag=t, **args)
+            yield tmpl(t1, tag=t, **args)
 
-    def diff(self, node1, node2, files):
+    def diff(self, tmpl, node1, node2, files):
         def filterfiles(filters, files):
             l = [x for x in files if x in filters]
 
@@ -160,22 +366,22 @@
 
         parity = paritygen(self.stripecount)
         def diffblock(diff, f, fn):
-            yield self.t("diffblock",
-                         lines=prettyprintlines(diff),
-                         parity=parity.next(),
-                         file=f,
-                         filenode=hex(fn or nullid))
+            yield tmpl("diffblock",
+                       lines=prettyprintlines(diff),
+                       parity=parity.next(),
+                       file=f,
+                       filenode=hex(fn or nullid))
 
         def prettyprintlines(diff):
             for l in diff.splitlines(1):
                 if l.startswith('+'):
-                    yield self.t("difflineplus", line=l)
+                    yield tmpl("difflineplus", line=l)
                 elif l.startswith('-'):
-                    yield self.t("difflineminus", line=l)
+                    yield tmpl("difflineminus", line=l)
                 elif l.startswith('@'):
-                    yield self.t("difflineat", line=l)
+                    yield tmpl("difflineat", line=l)
                 else:
-                    yield self.t("diffline", line=l)
+                    yield tmpl("diffline", line=l)
 
         r = self.repo
         c1 = r.changectx(node1)
@@ -192,21 +398,21 @@
         for f in modified:
             to = c1.filectx(f).data()
             tn = c2.filectx(f).data()
-            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
+            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
                                           opts=diffopts), f, tn)
         for f in added:
             to = None
             tn = c2.filectx(f).data()
-            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
+            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
                                           opts=diffopts), f, tn)
         for f in removed:
             to = c1.filectx(f).data()
             tn = None
-            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
+            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
                                           opts=diffopts), f, tn)
 
-    def changelog(self, ctx, shortlog=False):
-        def changelist(**map):
+    def changelog(self, tmpl, ctx, shortlog=False):
+        def changelist(limit=0,**map):
             cl = self.repo.changelog
             l = [] # build a list in forward order for efficiency
             for i in xrange(start, end):
@@ -220,12 +426,15 @@
                              "changelogtag": self.showtag("changelogtag",n),
                              "desc": ctx.description(),
                              "date": ctx.date(),
-                             "files": self.listfilediffs(ctx.files(), n),
+                             "files": self.listfilediffs(tmpl, ctx.files(), n),
                              "rev": i,
                              "node": hex(n),
                              "tags": self.nodetagsdict(n),
                              "branches": self.nodebranchdict(ctx)})
 
+            if limit > 0:
+                l = l[:limit]
+
             for e in l:
                 yield e
 
@@ -240,13 +449,15 @@
 
         changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
 
-        yield self.t(shortlog and 'shortlog' or 'changelog',
-                     changenav=changenav,
-                     node=hex(cl.tip()),
-                     rev=pos, changesets=count, entries=changelist,
-                     archives=self.archivelist("tip"))
+        return tmpl(shortlog and 'shortlog' or 'changelog',
+                    changenav=changenav,
+                    node=hex(cl.tip()),
+                    rev=pos, changesets=count,
+                    entries=lambda **x: changelist(limit=0, **x),
+                    latestentry=lambda **x: changelist(limit=1, **x),
+                    archives=self.archivelist("tip"))
 
-    def search(self, query):
+    def search(self, tmpl, query):
 
         def changelist(**map):
             cl = self.repo.changelog
@@ -256,7 +467,7 @@
             def revgen():
                 for i in xrange(cl.count() - 1, 0, -100):
                     l = []
-                    for j in xrange(max(0, i - 100), i):
+                    for j in xrange(max(0, i - 100), i + 1):
                         ctx = self.repo.changectx(j)
                         l.append(ctx)
                     l.reverse()
@@ -277,19 +488,19 @@
                 count += 1
                 n = ctx.node()
 
-                yield self.t('searchentry',
-                             parity=parity.next(),
-                             author=ctx.user(),
-                             parent=self.siblings(ctx.parents()),
-                             child=self.siblings(ctx.children()),
-                             changelogtag=self.showtag("changelogtag",n),
-                             desc=ctx.description(),
-                             date=ctx.date(),
-                             files=self.listfilediffs(ctx.files(), n),
-                             rev=ctx.rev(),
-                             node=hex(n),
-                             tags=self.nodetagsdict(n),
-                             branches=self.nodebranchdict(ctx))
+                yield tmpl('searchentry',
+                           parity=parity.next(),
+                           author=ctx.user(),
+                           parent=self.siblings(ctx.parents()),
+                           child=self.siblings(ctx.children()),
+                           changelogtag=self.showtag("changelogtag",n),
+                           desc=ctx.description(),
+                           date=ctx.date(),
+                           files=self.listfilediffs(tmpl, ctx.files(), n),
+                           rev=ctx.rev(),
+                           node=hex(n),
+                           tags=self.nodetagsdict(n),
+                           branches=self.nodebranchdict(ctx))
 
                 if count >= self.maxchanges:
                     break
@@ -297,13 +508,13 @@
         cl = self.repo.changelog
         parity = paritygen(self.stripecount)
 
-        yield self.t('search',
-                     query=query,
-                     node=hex(cl.tip()),
-                     entries=changelist,
-                     archives=self.archivelist("tip"))
+        return tmpl('search',
+                    query=query,
+                    node=hex(cl.tip()),
+                    entries=changelist,
+                    archives=self.archivelist("tip"))
 
-    def changeset(self, ctx):
+    def changeset(self, tmpl, ctx):
         n = ctx.node()
         parents = ctx.parents()
         p1 = parents[0].node()
@@ -311,29 +522,29 @@
         files = []
         parity = paritygen(self.stripecount)
         for f in ctx.files():
-            files.append(self.t("filenodelink",
-                                node=hex(n), file=f,
-                                parity=parity.next()))
+            files.append(tmpl("filenodelink",
+                              node=hex(n), file=f,
+                              parity=parity.next()))
 
         def diff(**map):
-            yield self.diff(p1, n, None)
+            yield self.diff(tmpl, p1, n, None)
 
-        yield self.t('changeset',
-                     diff=diff,
-                     rev=ctx.rev(),
-                     node=hex(n),
-                     parent=self.siblings(parents),
-                     child=self.siblings(ctx.children()),
-                     changesettag=self.showtag("changesettag",n),
-                     author=ctx.user(),
-                     desc=ctx.description(),
-                     date=ctx.date(),
-                     files=files,
-                     archives=self.archivelist(hex(n)),
-                     tags=self.nodetagsdict(n),
-                     branches=self.nodebranchdict(ctx))
+        return tmpl('changeset',
+                    diff=diff,
+                    rev=ctx.rev(),
+                    node=hex(n),
+                    parent=self.siblings(parents),
+                    child=self.siblings(ctx.children()),
+                    changesettag=self.showtag("changesettag",n),
+                    author=ctx.user(),
+                    desc=ctx.description(),
+                    date=ctx.date(),
+                    files=files,
+                    archives=self.archivelist(hex(n)),
+                    tags=self.nodetagsdict(n),
+                    branches=self.nodebranchdict(ctx))
 
-    def filelog(self, fctx):
+    def filelog(self, tmpl, fctx):
         f = fctx.path()
         fl = fctx.filelog()
         count = fl.count()
@@ -344,7 +555,7 @@
         pos = end - 1
         parity = paritygen(self.stripecount, offset=start-end)
 
-        def entries(**map):
+        def entries(limit=0, **map):
             l = []
 
             for i in xrange(start, end):
@@ -362,27 +573,28 @@
                              "child": self.siblings(fctx.children()),
                              "desc": ctx.description()})
 
+            if limit > 0:
+                l = l[:limit]
+
             for e in l:
                 yield e
 
         nodefunc = lambda x: fctx.filectx(fileid=x)
         nav = revnavgen(pos, pagelen, count, nodefunc)
-        yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav,
-                     entries=entries)
+        return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav,
+                    entries=lambda **x: entries(limit=0, **x),
+                    latestentry=lambda **x: entries(limit=1, **x))
 
-    def filerevision(self, fctx):
+    def filerevision(self, tmpl, fctx):
         f = fctx.path()
         text = fctx.data()
         fl = fctx.filelog()
         n = fctx.filenode()
         parity = paritygen(self.stripecount)
 
-        mt = mimetypes.guess_type(f)[0]
-        rawtext = text
         if util.binary(text):
-            mt = mt or 'application/octet-stream'
-            text = "(binary:%s)" % mt
-        mt = mt or 'text/plain'
+            mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
+            text = '(binary:%s)' % mt
 
         def lines():
             for l, t in enumerate(text.splitlines(1)):
@@ -390,23 +602,21 @@
                        "linenumber": "% 6d" % (l + 1),
                        "parity": parity.next()}
 
-        yield self.t("filerevision",
-                     file=f,
-                     path=_up(f),
-                     text=lines(),
-                     raw=rawtext,
-                     mimetype=mt,
-                     rev=fctx.rev(),
-                     node=hex(fctx.node()),
-                     author=fctx.user(),
-                     date=fctx.date(),
-                     desc=fctx.description(),
-                     parent=self.siblings(fctx.parents()),
-                     child=self.siblings(fctx.children()),
-                     rename=self.renamelink(fl, n),
-                     permissions=fctx.manifest().flags(f))
+        return tmpl("filerevision",
+                    file=f,
+                    path=_up(f),
+                    text=lines(),
+                    rev=fctx.rev(),
+                    node=hex(fctx.node()),
+                    author=fctx.user(),
+                    date=fctx.date(),
+                    desc=fctx.description(),
+                    parent=self.siblings(fctx.parents()),
+                    child=self.siblings(fctx.children()),
+                    rename=self.renamelink(fl, n),
+                    permissions=fctx.manifest().flags(f))
 
-    def fileannotate(self, fctx):
+    def fileannotate(self, tmpl, fctx):
         f = fctx.path()
         n = fctx.filenode()
         fl = fctx.filelog()
@@ -428,21 +638,21 @@
                        "file": f.path(),
                        "line": l}
 
-        yield self.t("fileannotate",
-                     file=f,
-                     annotate=annotate,
-                     path=_up(f),
-                     rev=fctx.rev(),
-                     node=hex(fctx.node()),
-                     author=fctx.user(),
-                     date=fctx.date(),
-                     desc=fctx.description(),
-                     rename=self.renamelink(fl, n),
-                     parent=self.siblings(fctx.parents()),
-                     child=self.siblings(fctx.children()),
-                     permissions=fctx.manifest().flags(f))
+        return tmpl("fileannotate",
+                    file=f,
+                    annotate=annotate,
+                    path=_up(f),
+                    rev=fctx.rev(),
+                    node=hex(fctx.node()),
+                    author=fctx.user(),
+                    date=fctx.date(),
+                    desc=fctx.description(),
+                    rename=self.renamelink(fl, n),
+                    parent=self.siblings(fctx.parents()),
+                    child=self.siblings(fctx.children()),
+                    permissions=fctx.manifest().flags(f))
 
-    def manifest(self, ctx, path):
+    def manifest(self, tmpl, ctx, path):
         mf = ctx.manifest()
         node = ctx.node()
 
@@ -465,6 +675,9 @@
                 short = os.path.basename(remain)
                 files[short] = (f, n)
 
+        if not files:
+            raise ErrorResponse(HTTP_NOT_FOUND, 'Path not found: ' + path)
+
         def filelist(**map):
             fl = files.keys()
             fl.sort()
@@ -473,10 +686,12 @@
                 if not fnode:
                     continue
 
+                fctx = ctx.filectx(full)
                 yield {"file": full,
                        "parity": parity.next(),
                        "basename": f,
-                       "size": ctx.filectx(full).size(),
+                       "date": fctx.changectx().date(),
+                       "size": fctx.size(),
                        "permissions": mf.flags(full)}
 
         def dirlist(**map):
@@ -488,41 +703,46 @@
                     continue
 
                 yield {"parity": parity.next(),
-                       "path": os.path.join(abspath, f),
+                       "path": "%s%s" % (abspath, f),
                        "basename": f[:-1]}
 
-        yield self.t("manifest",
-                     rev=ctx.rev(),
-                     node=hex(node),
-                     path=abspath,
-                     up=_up(abspath),
-                     upparity=parity.next(),
-                     fentries=filelist,
-                     dentries=dirlist,
-                     archives=self.archivelist(hex(node)),
-                     tags=self.nodetagsdict(node),
-                     branches=self.nodebranchdict(ctx))
+        return tmpl("manifest",
+                    rev=ctx.rev(),
+                    node=hex(node),
+                    path=abspath,
+                    up=_up(abspath),
+                    upparity=parity.next(),
+                    fentries=filelist,
+                    dentries=dirlist,
+                    archives=self.archivelist(hex(node)),
+                    tags=self.nodetagsdict(node),
+                    branches=self.nodebranchdict(ctx))
 
-    def tags(self):
+    def tags(self, tmpl):
         i = self.repo.tagslist()
         i.reverse()
         parity = paritygen(self.stripecount)
 
-        def entries(notip=False, **map):
+        def entries(notip=False, limit=0, **map):
+            count = 0
             for k, n in i:
                 if notip and k == "tip":
                     continue
+                if limit > 0 and count >= limit:
+                    continue
+                count = count + 1
                 yield {"parity": parity.next(),
                        "tag": k,
                        "date": self.repo.changectx(n).date(),
                        "node": hex(n)}
 
-        yield self.t("tags",
-                     node=hex(self.repo.changelog.tip()),
-                     entries=lambda **x: entries(False, **x),
-                     entriesnotip=lambda **x: entries(True, **x))
+        return tmpl("tags",
+                    node=hex(self.repo.changelog.tip()),
+                    entries=lambda **x: entries(False, 0, **x),
+                    entriesnotip=lambda **x: entries(True, 0, **x),
+                    latestentry=lambda **x: entries(True, 1, **x))
 
-    def summary(self):
+    def summary(self, tmpl):
         i = self.repo.tagslist()
         i.reverse()
 
@@ -537,11 +757,11 @@
                 if count > 10: # limit to 10 tags
                     break;
 
-                yield self.t("tagentry",
-                             parity=parity.next(),
-                             tag=k,
-                             node=hex(n),
-                             date=self.repo.changectx(n).date())
+                yield tmpl("tagentry",
+                           parity=parity.next(),
+                           tag=k,
+                           node=hex(n),
+                           date=self.repo.changectx(n).date())
 
 
         def branches(**map):
@@ -567,8 +787,8 @@
                 n = ctx.node()
                 hn = hex(n)
 
-                l.insert(0, self.t(
-                    'shortlogentry',
+                l.insert(0, tmpl(
+                    'shortlogentry',
                     parity=parity.next(),
                     author=ctx.user(),
                     desc=ctx.description(),
@@ -585,34 +805,32 @@
         start = max(0, count - self.maxchanges)
         end = min(count, start + self.maxchanges)
 
-        yield self.t("summary",
-                 desc=self.config("web", "description", "unknown"),
-                 owner=(self.config("ui", "username") or # preferred
-                        self.config("web", "contact") or # deprecated
-                        self.config("web", "author", "unknown")), # also
-                 lastchange=cl.read(cl.tip())[2],
-                 tags=tagentries,
-                 branches=branches,
-                 shortlog=changelist,
-                 node=hex(cl.tip()),
-                 archives=self.archivelist("tip"))
+        return tmpl("summary",
+                    desc=self.config("web", "description", "unknown"),
+                    owner=get_contact(self.config) or "unknown",
+                    lastchange=cl.read(cl.tip())[2],
+                    tags=tagentries,
+                    branches=branches,
+                    shortlog=changelist,
+                    node=hex(cl.tip()),
+                    archives=self.archivelist("tip"))
 
-    def filediff(self, fctx):
+    def filediff(self, tmpl, fctx):
         n = fctx.node()
         path = fctx.path()
         parents = fctx.parents()
         p1 = parents and parents[0].node() or nullid
 
         def diff(**map):
-            yield self.diff(p1, n, [path])
+            yield self.diff(tmpl, p1, n, [path])
 
-        yield self.t("filediff",
-                     file=path,
-                     node=hex(n),
-                     rev=fctx.rev(),
-                     parent=self.siblings(parents),
-                     child=self.siblings(fctx.children()),
-                     diff=diff)
+        return tmpl("filediff",
+                    file=path,
+                    node=hex(n),
+                    rev=fctx.rev(),
+                    parent=self.siblings(parents),
+                    child=self.siblings(fctx.children()),
+                    diff=diff)
 
     archive_specs = {
         'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
@@ -620,7 +838,7 @@
         'zip': ('application/zip', 'zip', '.zip', None),
         }
 
-    def archive(self, req, key, type_):
+    def archive(self, tmpl, req, key, type_):
         reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
         cnode = self.repo.lookup(key)
         arch_version = key
@@ -628,13 +846,16 @@
             arch_version = short(cnode)
         name = "%s-%s" % (reponame, arch_version)
         mimetype, artype, extension, encoding = self.archive_specs[type_]
-        headers = [('Content-type', mimetype),
-                   ('Content-disposition', 'attachment; filename=%s%s' %
-                    (name, extension))]
+        headers = [
+            ('Content-Type', mimetype),
+            ('Content-Disposition', 'attachment; filename=%s%s' %
+                (name, extension))
+        ]
         if encoding:
-            headers.append(('Content-encoding', encoding))
+            headers.append(('Content-Encoding', encoding))
         req.header(headers)
-        archival.archive(self.repo, req.out, cnode, artype, prefix=name)
+        req.respond(HTTP_OK)
+        archival.archive(self.repo, req, cnode, artype, prefix=name)
 
     # add tags to things
     # tags -> list of changesets corresponding to tags
@@ -644,202 +865,10 @@
         path = path.lstrip('/')
         return util.canonpath(self.repo.root, '', path)
 
-    def run(self):
-        if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
-            raise RuntimeError("This function is only intended to be called while running as a CGI script.")
-        import mercurial.hgweb.wsgicgi as wsgicgi
-        from request import wsgiapplication
-        def make_web_app():
-            return self
-        wsgicgi.launch(wsgiapplication(make_web_app))
-
-    def run_wsgi(self, req):
-        def header(**map):
-            header_file = cStringIO.StringIO(
-                ''.join(self.t("header", encoding=self.encoding, **map)))
-            msg = mimetools.Message(header_file, 0)
-            req.header(msg.items())
-            yield header_file.read()
-
-        def rawfileheader(**map):
-            req.header([('Content-type', map['mimetype']),
-                        ('Content-disposition', 'filename=%s' % map['file']),
-                        ('Content-length', str(len(map['raw'])))])
-            yield ''
-
-        def footer(**map):
-            yield self.t("footer", **map)
-
-        def motd(**map):
-            yield self.config("web", "motd", "")
-
-        def expand_form(form):
-            shortcuts = {
-                'cl': [('cmd', ['changelog']), ('rev', None)],
-                'sl': [('cmd', ['shortlog']), ('rev', None)],
-                'cs': [('cmd', ['changeset']), ('node', None)],
-                'f': [('cmd', ['file']), ('filenode', None)],
-                'fl': [('cmd', ['filelog']), ('filenode', None)],
-                'fd': [('cmd', ['filediff']), ('node', None)],
-                'fa': [('cmd', ['annotate']), ('filenode', None)],
-                'mf': [('cmd', ['manifest']), ('manifest', None)],
-                'ca': [('cmd', ['archive']), ('node', None)],
-                'tags': [('cmd', ['tags'])],
-                'tip': [('cmd', ['changeset']), ('node', ['tip'])],
-                'static': [('cmd', ['static']), ('file', None)]
-            }
-
-            for k in shortcuts.iterkeys():
-                if form.has_key(k):
-                    for name, value in shortcuts[k]:
-                        if value is None:
-                            value = form[k]
-                        form[name] = value
-                    del form[k]
-
-        def rewrite_request(req):
-            '''translate new web interface to traditional format'''
-
-            def spliturl(req):
-                def firstitem(query):
-                    return query.split('&', 1)[0].split(';', 1)[0]
-
-                def normurl(url):
-                    inner = '/'.join([x for x in url.split('/') if x])
-                    tl = len(url) > 1 and url.endswith('/') and '/' or ''
-
-                    return '%s%s%s' % (url.startswith('/') and '/' or '',
-                                       inner, tl)
-
-                root = normurl(urllib.unquote(req.env.get('REQUEST_URI', '').split('?', 1)[0]))
-                pi = normurl(req.env.get('PATH_INFO', ''))
-                if pi:
-                    # strip leading /
-                    pi = pi[1:]
-                    if pi:
-                        root = root[:root.rfind(pi)]
-                    if req.env.has_key('REPO_NAME'):
-                        rn = req.env['REPO_NAME'] + '/'
-                        root += rn
-                        query = pi[len(rn):]
-                    else:
-                        query = pi
-                else:
-                    root += '?'
-                    query = firstitem(req.env['QUERY_STRING'])
-
-                return (root, query)
-
-            req.url, query = spliturl(req)
-
-            if req.form.has_key('cmd'):
-                # old style
-                return
-
-            args = query.split('/', 2)
-            if not args or not args[0]:
-                return
-
-            cmd = args.pop(0)
-            style = cmd.rfind('-')
-            if style != -1:
-                req.form['style'] = [cmd[:style]]
-                cmd = cmd[style+1:]
-            # avoid accepting e.g. style parameter as command
-            if hasattr(self, 'do_' + cmd):
-                req.form['cmd'] = [cmd]
-
-            if args and args[0]:
-                node = args.pop(0)
-                req.form['node'] = [node]
-            if args:
-                req.form['file'] = args
-
-            if cmd == 'static':
-                req.form['file'] = req.form['node']
-            elif cmd == 'archive':
-                fn = req.form['node'][0]
-                for type_, spec in self.archive_specs.iteritems():
-                    ext = spec[2]
-                    if fn.endswith(ext):
-                        req.form['node'] = [fn[:-len(ext)]]
-                        req.form['type'] = [type_]
-
-        def sessionvars(**map):
-            fields = []
-            if req.form.has_key('style'):
-                style = req.form['style'][0]
-                if style != self.config('web', 'style', ''):
-                    fields.append(('style', style))
-
-            separator = req.url[-1] == '?' and ';' or '?'
-            for name, value in fields:
-                yield dict(name=name, value=value, separator=separator)
-                separator = ';'
-
-        self.refresh()
-
-        expand_form(req.form)
-        rewrite_request(req)
-
-        style = self.config("web", "style", "")
-        if req.form.has_key('style'):
-            style = req.form['style'][0]
-        mapfile = style_map(self.templatepath, style)
-
-        proto = req.env.get('wsgi.url_scheme')
-        if proto == 'https':
-            proto = 'https'
-            default_port = "443"
-        else:
-            proto = 'http'
-            default_port = "80"
-
-        port = req.env["SERVER_PORT"]
-        port = port != default_port and (":" + port) or ""
-        urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
-        staticurl = self.config("web", "staticurl") or req.url + 'static/'
-        if not staticurl.endswith('/'):
-            staticurl += '/'
-
-        if not self.reponame:
-            self.reponame = (self.config("web", "name")
-                             or req.env.get('REPO_NAME')
-                             or req.url.strip('/') or self.repo.root)
-
-        self.t = templater.templater(mapfile, templater.common_filters,
-                                     defaults={"url": req.url,
-                                               "staticurl": staticurl,
-                                               "urlbase": urlbase,
-                                               "repo": self.reponame,
-                                               "header": header,
-                                               "footer": footer,
-                                               "motd": motd,
-                                               "rawfileheader": rawfileheader,
-                                               "sessionvars": sessionvars
-                                               })
-
-        try:
-            if not req.form.has_key('cmd'):
-                req.form['cmd'] = [self.t.cache['default']]
-
-            cmd = req.form['cmd'][0]
-
-            method = getattr(self, 'do_' + cmd, None)
-            if method:
-                try:
-                    method(req)
-                except (hg.RepoError, revlog.RevlogError), inst:
-                    req.write(self.t("error", error=str(inst)))
-            else:
-                req.write(self.t("error", error='No such method: ' + cmd))
-        finally:
-            self.t = None
-
     def changectx(self, req):
-        if req.form.has_key('node'):
+        if 'node' in req.form:
             changeid = req.form['node'][0]
-        elif req.form.has_key('manifest'):
+        elif 'manifest' in req.form:
             changeid = req.form['manifest'][0]
         else:
             changeid = self.repo.changelog.count() - 1
@@ -855,7 +884,7 @@
 
     def filectx(self, req):
         path = self.cleanpath(req.form['file'][0])
-        if req.form.has_key('node'):
+        if 'node' in req.form:
             changeid = req.form['node'][0]
         else:
             changeid = req.form['filenode'][0]
@@ -867,181 +896,6 @@
 
         return fctx
 
-    def do_log(self, req):
-        if req.form.has_key('file') and req.form['file'][0]:
-            self.do_filelog(req)
-        else:
-            self.do_changelog(req)
-
-    def do_rev(self, req):
-        self.do_changeset(req)
-
-    def do_file(self, req):
-        path = self.cleanpath(req.form.get('file', [''])[0])
-        if path:
-            try:
-                req.write(self.filerevision(self.filectx(req)))
-                return
-            except revlog.LookupError:
-                pass
-
-        req.write(self.manifest(self.changectx(req), path))
-
-    def do_diff(self, req):
-        self.do_filediff(req)
-
-    def do_changelog(self, req, shortlog = False):
-        if req.form.has_key('node'):
-            ctx = self.changectx(req)
-        else:
-            if req.form.has_key('rev'):
-                hi = req.form['rev'][0]
-            else:
-                hi = self.repo.changelog.count() - 1
-            try:
-                ctx = self.repo.changectx(hi)
-            except hg.RepoError:
-                req.write(self.search(hi)) # XXX redirect to 404 page?
-                return
-
-        req.write(self.changelog(ctx, shortlog = shortlog))
-
-    def do_shortlog(self, req):
-        self.do_changelog(req, shortlog = True)
-
-    def do_changeset(self, req):
-        req.write(self.changeset(self.changectx(req)))
-
-    def do_manifest(self, req):
-        req.write(self.manifest(self.changectx(req),
-                                self.cleanpath(req.form['path'][0])))
-
-    def do_tags(self, req):
-        req.write(self.tags())
-
-    def do_summary(self, req):
-        req.write(self.summary())
-
-    def do_filediff(self, req):
-        req.write(self.filediff(self.filectx(req)))
-
-    def do_annotate(self, req):
-        req.write(self.fileannotate(self.filectx(req)))
-
-    def do_filelog(self, req):
-        req.write(self.filelog(self.filectx(req)))
-
-    def do_lookup(self, req):
-        try:
-            r = hex(self.repo.lookup(req.form['key'][0]))
-            success = 1
-        except Exception,inst:
-            r = str(inst)
-            success = 0
-        resp = "%s %s\n" % (success, r)
-        req.httphdr("application/mercurial-0.1", length=len(resp))
-        req.write(resp)
-
-    def do_heads(self, req):
-        resp = " ".join(map(hex, self.repo.heads())) + "\n"
-        req.httphdr("application/mercurial-0.1", length=len(resp))
-        req.write(resp)
-
-    def do_branches(self, req):
-        nodes = []
-        if req.form.has_key('nodes'):
-            nodes = map(bin, req.form['nodes'][0].split(" "))
-        resp = cStringIO.StringIO()
-        for b in self.repo.branches(nodes):
-            resp.write(" ".join(map(hex, b)) + "\n")
-        resp = resp.getvalue()
-        req.httphdr("application/mercurial-0.1", length=len(resp))
-        req.write(resp)
-
-    def do_between(self, req):
-        if req.form.has_key('pairs'):
-            pairs = [map(bin, p.split("-"))
-                     for p in req.form['pairs'][0].split(" ")]
-        resp = cStringIO.StringIO()
-        for b in self.repo.between(pairs):
-            resp.write(" ".join(map(hex, b)) + "\n")
-        resp = resp.getvalue()
-        req.httphdr("application/mercurial-0.1", length=len(resp))
-        req.write(resp)
-
-    def do_changegroup(self, req):
-        req.httphdr("application/mercurial-0.1")
-        nodes = []
-        if not self.allowpull:
-            return
-
-        if req.form.has_key('roots'):
-            nodes = map(bin, req.form['roots'][0].split(" "))
-
-        z = zlib.compressobj()
-        f = self.repo.changegroup(nodes, 'serve')
-        while 1:
-            chunk = f.read(4096)
-            if not chunk:
-                break
-            req.write(z.compress(chunk))
-
-        req.write(z.flush())
-
-    def do_changegroupsubset(self, req):
-        req.httphdr("application/mercurial-0.1")
-        bases = []
-        heads = []
-        if not self.allowpull:
-            return
-
-        if req.form.has_key('bases'):
-            bases = [bin(x) for x in req.form['bases'][0].split(' ')]
-        if req.form.has_key('heads'):
-            heads = [bin(x) for x in req.form['heads'][0].split(' ')]
-
-        z = zlib.compressobj()
-        f = self.repo.changegroupsubset(bases, heads, 'serve')
-        while 1:
-            chunk = f.read(4096)
-            if not chunk:
-                break
-            req.write(z.compress(chunk))
-
-        req.write(z.flush())
-
-    def do_archive(self, req):
-        type_ = req.form['type'][0]
-        allowed = self.configlist("web", "allow_archive")
-        if (type_ in self.archives and (type_ in allowed or
-            self.configbool("web", "allow" + type_, False))):
-            self.archive(req, req.form['node'][0], type_)
-            return
-
-        req.write(self.t("error"))
-
-    def do_static(self, req):
-        fname = req.form['file'][0]
-        # a repo owner may set web.static in .hg/hgrc to get any file
-        # readable by the user running the CGI script
-        static = self.config("web", "static",
-                             os.path.join(self.templatepath, "static"),
-                             untrusted=False)
-        req.write(staticfile(static, fname, req)
-                  or self.t("error", error="%r not found" % fname))
-
-    def do_capabilities(self, req):
-        caps = ['lookup', 'changegroupsubset']
-        if self.configbool('server', 'uncompressed'):
-            caps.append('stream=%d' % self.repo.changelog.version)
-        # XXX: make configurable and/or share code with do_unbundle:
-        unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
-        if unbundleversions:
-            caps.append('unbundle=%s' % ','.join(unbundleversions))
-        resp = ' '.join(caps)
-        req.httphdr("application/mercurial-0.1", length=len(resp))
-        req.write(resp)
-
     def check_perm(self, req, op, default):
         '''check permission for operation based on user auth.
         return true if op allowed, else false.
@@ -1055,134 +909,3 @@
 
         allow = self.configlist('web', 'allow_' + op)
         return (allow and (allow == ['*'] or user in allow)) or default
-
-    def do_unbundle(self, req):
-        def bail(response, headers={}):
-            length = int(req.env['CONTENT_LENGTH'])
-            for s in util.filechunkiter(req, limit=length):
-                # drain incoming bundle, else client will not see
-                # response when run outside cgi script
-                pass
-            req.httphdr("application/mercurial-0.1", headers=headers)
-            req.write('0\n')
-            req.write(response)
-
-        # require ssl by default, auth info cannot be sniffed and
-        # replayed
-        ssl_req = self.configbool('web', 'push_ssl', True)
-        if ssl_req:
-            if req.env.get('wsgi.url_scheme') != 'https':
-                bail(_('ssl required\n'))
-                return
-            proto = 'https'
-        else:
-            proto = 'http'
-
-        # do not allow push unless explicitly allowed
-        if not self.check_perm(req, 'push', False):
-            bail(_('push not authorized\n'),
-                 headers={'status': '401 Unauthorized'})
-            return
-
-        their_heads = req.form['heads'][0].split(' ')
-
-        def check_heads():
-            heads = map(hex, self.repo.heads())
-            return their_heads == [hex('force')] or their_heads == heads
-
-        # fail early if possible
-        if not check_heads():
-            bail(_('unsynced changes\n'))
-            return
-
-        req.httphdr("application/mercurial-0.1")
-
-        # do not lock repo until all changegroup data is
-        # streamed. save to temporary file.
-
-        fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
-        fp = os.fdopen(fd, 'wb+')
-        try:
-            length = int(req.env['CONTENT_LENGTH'])
-            for s in util.filechunkiter(req, limit=length):
-                fp.write(s)
-
-            try:
-                lock = self.repo.lock()
-                try:
-                    if not check_heads():
-                        req.write('0\n')
-                        req.write(_('unsynced changes\n'))
-                        return
-
-                    fp.seek(0)
-                    header = fp.read(6)
-                    if not header.startswith("HG"):
-                        # old client with uncompressed bundle
-                        def generator(f):
-                            yield header
-                            for chunk in f:
-                                yield chunk
-                    elif not header.startswith("HG10"):
-                        req.write("0\n")
-                        req.write(_("unknown bundle version\n"))
-                        return
-                    elif header == "HG10GZ":
-                        def generator(f):
-                            zd = zlib.decompressobj()
-                            for chunk in f:
-                                yield zd.decompress(chunk)
-                    elif header == "HG10BZ":
-                        def generator(f):
-                            zd = bz2.BZ2Decompressor()
-                            zd.decompress("BZ")
-                            for chunk in f:
-                                yield zd.decompress(chunk)
-                    elif header == "HG10UN":
-                        def generator(f):
-                            for chunk in f:
-                                yield chunk
-                    else:
-                        req.write("0\n")
-                        req.write(_("unknown bundle compression type\n"))
-                        return
-                    gen = generator(util.filechunkiter(fp, 4096))
-
-                    # send addchangegroup output to client
-
-                    old_stdout = sys.stdout
-                    sys.stdout = cStringIO.StringIO()
-
-                    try:
-                        url = 'remote:%s:%s' % (proto,
-                                                req.env.get('REMOTE_HOST', ''))
-                        try:
-                            ret = self.repo.addchangegroup(
-                                        util.chunkbuffer(gen), 'serve', url)
-                        except util.Abort, inst:
-                            sys.stdout.write("abort: %s\n" % inst)
-                            ret = 0
-                    finally:
-                        val = sys.stdout.getvalue()
-                        sys.stdout = old_stdout
-                    req.write('%d\n' % ret)
-                    req.write(val)
-                finally:
-                    del lock
-            except (OSError, IOError), inst:
-                req.write('0\n')
-                filename = getattr(inst, 'filename', '')
-                # Don't send our filesystem layout to the client
-                if filename.startswith(self.repo.root):
-                    filename = filename[len(self.repo.root)+1:]
-                else:
-                    filename = ''
-                error = getattr(inst, 'strerror', 'Unknown error')
-                req.write('%s: %s\n' % (error, filename))
-        finally:
-            fp.close()
-            os.unlink(tempname)
-
-    def do_stream_out(self, req):
-        req.httphdr("application/mercurial-0.1")
-        streamclone.stream_out(self.repo, req, untrusted=True)
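
Several hunks above (changelog, filelog, tags) introduce the same entries/latestentry pattern: one generator takes an optional limit, and the template receives two callables bound to limit=0 (everything) and limit=1 (newest entry only). A minimal sketch with made-up revision data:

def entries(limit=0, **map):
    # hypothetical data standing in for the changelog walk done above
    l = [{'rev': r} for r in (5, 4, 3, 2, 1)]
    if limit > 0:
        l = l[:limit]
    for e in l:
        yield e

allentries = lambda **x: entries(limit=0, **x)
latestentry = lambda **x: entries(limit=1, **x)

print [e['rev'] for e in allentries()]    # [5, 4, 3, 2, 1]
print [e['rev'] for e in latestentry()]   # [5]
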
--- a/mercurial/hgweb/hgwebdir_mod.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/hgweb/hgwebdir_mod.py	Wed Feb 06 19:57:52 2008 -0800
@@ -6,20 +6,23 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-from mercurial import demandimport; demandimport.enable()
-import os, mimetools, cStringIO
+import os
 from mercurial.i18n import gettext as _
-from mercurial import ui, hg, util, templater
-from common import get_mtime, staticfile, style_map, paritygen
+from mercurial import ui, hg, util, templater, templatefilters
+from common import ErrorResponse, get_mtime, staticfile, style_map, paritygen,\
+                   get_contact, HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
 from hgweb_mod import hgweb
+from request import wsgirequest
 
 # This is a stopgap
 class hgwebdir(object):
     def __init__(self, config, parentui=None):
         def cleannames(items):
-            return [(name.strip(os.sep), path) for name, path in items]
+            return [(util.pconvert(name).strip('/'), path)
+                    for name, path in items]
 
-        self.parentui = parentui
+        self.parentui = parentui or ui.ui(report_untrusted=False,
+                                          interactive=False)
         self.motd = None
         self.style = None
         self.stripecount = None
@@ -60,59 +63,79 @@
         if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
             raise RuntimeError("This function is only intended to be called while running as a CGI script.")
         import mercurial.hgweb.wsgicgi as wsgicgi
-        from request import wsgiapplication
-        def make_web_app():
-            return self
-        wsgicgi.launch(wsgiapplication(make_web_app))
+        wsgicgi.launch(self)
+
+    def __call__(self, env, respond):
+        req = wsgirequest(env, respond)
+        self.run_wsgi(req)
+        return req
 
     def run_wsgi(self, req):
-        def header(**map):
-            header_file = cStringIO.StringIO(
-                ''.join(tmpl("header", encoding=util._encoding, **map)))
-            msg = mimetools.Message(header_file, 0)
-            req.header(msg.items())
-            yield header_file.read()
+
+        try:
+            try:
 
-        def footer(**map):
-            yield tmpl("footer", **map)
+                virtual = req.env.get("PATH_INFO", "").strip('/')
+                tmpl = self.templater(req)
+                ctype = tmpl('mimetype', encoding=util._encoding)
+                ctype = templater.stringify(ctype)
 
-        def motd(**map):
-            if self.motd is not None:
-                yield self.motd
-            else:
-                yield config('web', 'motd', '')
+                # a static file
+                if virtual.startswith('static/') or 'static' in req.form:
+                    static = os.path.join(templater.templatepath(), 'static')
+                    if virtual.startswith('static/'):
+                        fname = virtual[7:]
+                    else:
+                        fname = req.form['static'][0]
+                    req.write(staticfile(static, fname, req))
+                    return
 
-        parentui = self.parentui or ui.ui(report_untrusted=False)
-
-        def config(section, name, default=None, untrusted=True):
-            return parentui.config(section, name, default, untrusted)
+                # top-level index
+                elif not virtual:
+                    req.respond(HTTP_OK, ctype)
+                    req.write(self.makeindex(req, tmpl))
+                    return
 
-        url = req.env['REQUEST_URI'].split('?')[0]
-        if not url.endswith('/'):
-            url += '/'
-        pathinfo = req.env.get('PATH_INFO', '').strip('/') + '/'
-        base = url[:len(url) - len(pathinfo)]
-        if not base.endswith('/'):
-            base += '/'
-
-        staticurl = config('web', 'staticurl') or base + 'static/'
-        if not staticurl.endswith('/'):
-            staticurl += '/'
+                # nested indexes and hgwebs
+
+                repos = dict(self.repos)
+                while virtual:
+                    real = repos.get(virtual)
+                    if real:
+                        req.env['REPO_NAME'] = virtual
+                        try:
+                            repo = hg.repository(self.parentui, real)
+                            hgweb(repo).run_wsgi(req)
+                            return
+                        except IOError, inst:
+                            msg = inst.strerror
+                            raise ErrorResponse(HTTP_SERVER_ERROR, msg)
+                        except hg.RepoError, inst:
+                            raise ErrorResponse(HTTP_SERVER_ERROR, str(inst))
 
-        style = self.style
-        if style is None:
-            style = config('web', 'style', '')
-        if req.form.has_key('style'):
-            style = req.form['style'][0]
-        if self.stripecount is None:
-            self.stripecount = int(config('web', 'stripes', 1))
-        mapfile = style_map(templater.templatepath(), style)
-        tmpl = templater.templater(mapfile, templater.common_filters,
-                                   defaults={"header": header,
-                                             "footer": footer,
-                                             "motd": motd,
-                                             "url": url,
-                                             "staticurl": staticurl})
+                    # browse subdirectories
+                    subdir = virtual + '/'
+                    if [r for r in repos if r.startswith(subdir)]:
+                        req.respond(HTTP_OK, ctype)
+                        req.write(self.makeindex(req, tmpl, subdir))
+                        return
+
+                    up = virtual.rfind('/')
+                    if up < 0:
+                        break
+                    virtual = virtual[:up]
+
+                # prefixes not found
+                req.respond(HTTP_NOT_FOUND, ctype)
+                req.write(tmpl("notfound", repo=virtual))
+
+            except ErrorResponse, err:
+                req.respond(err.code, ctype)
+                req.write(tmpl('error', error=err.message or ''))
+        finally:
+            tmpl = None
+
+    def makeindex(self, req, tmpl, subdir=""):
 
         def archivelist(ui, nodeid, url):
             allowed = ui.configlist("web", "allow_archive", untrusted=True)
@@ -125,7 +148,7 @@
         def entries(sortcolumn="", descending=False, subdir="", **map):
             def sessionvars(**map):
                 fields = []
-                if req.form.has_key('style'):
+                if 'style' in req.form:
                     style = req.form['style'][0]
                     if style != get('web', 'style', ''):
                         fields.append(('style', style))
@@ -142,19 +165,22 @@
                     continue
                 name = name[len(subdir):]
 
-                u = ui.ui(parentui=parentui)
+                u = ui.ui(parentui=self.parentui)
                 try:
                     u.readconfig(os.path.join(path, '.hg', 'hgrc'))
-                except IOError:
-                    pass
+                except Exception, e:
+                    u.warn(_('error reading %s/.hg/hgrc: %s\n' % (path, e)))
+                    continue
                 def get(section, name, default=None):
                     return u.config(section, name, default, untrusted=True)
 
                 if u.configbool("web", "hidden", untrusted=True):
                     continue
 
-                url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
-                       .replace("//", "/")) + '/'
+                parts = [req.env['PATH_INFO'].strip('/'), name]
+                if req.env['SCRIPT_NAME']:
+                    parts.insert(0, req.env['SCRIPT_NAME'])
+                url = ('/'.join(parts).replace("//", "/")) + '/'
 
                 # update time with local timezone
                 try:
@@ -162,9 +188,7 @@
                 except OSError:
                     continue
 
-                contact = (get("ui", "username") or # preferred
-                           get("web", "contact") or # deprecated
-                           get("web", "author", "")) # also
+                contact = get_contact(get)
                 description = get("web", "description", "")
                 name = get("web", "name", name)
                 row = dict(contact=contact or "unknown",
@@ -193,66 +217,62 @@
                     row['parity'] = parity.next()
                     yield row
 
-        def makeindex(req, subdir=""):
-            sortable = ["name", "description", "contact", "lastchange"]
-            sortcolumn, descending = self.repos_sorted
-            if req.form.has_key('sort'):
-                sortcolumn = req.form['sort'][0]
-                descending = sortcolumn.startswith('-')
-                if descending:
-                    sortcolumn = sortcolumn[1:]
-                if sortcolumn not in sortable:
-                    sortcolumn = ""
+        sortable = ["name", "description", "contact", "lastchange"]
+        sortcolumn, descending = self.repos_sorted
+        if 'sort' in req.form:
+            sortcolumn = req.form['sort'][0]
+            descending = sortcolumn.startswith('-')
+            if descending:
+                sortcolumn = sortcolumn[1:]
+            if sortcolumn not in sortable:
+                sortcolumn = ""
 
-            sort = [("sort_%s" % column,
-                     "%s%s" % ((not descending and column == sortcolumn)
-                               and "-" or "", column))
-                    for column in sortable]
-            req.write(tmpl("index", entries=entries, subdir=subdir,
-                           sortcolumn=sortcolumn, descending=descending,
-                           **dict(sort)))
+        sort = [("sort_%s" % column,
+                 "%s%s" % ((not descending and column == sortcolumn)
+                            and "-" or "", column))
+                for column in sortable]
+
+        return tmpl("index", entries=entries, subdir=subdir,
+                    sortcolumn=sortcolumn, descending=descending,
+                    **dict(sort))
+
+    def templater(self, req):
+
+        def header(**map):
+            yield tmpl('header', encoding=util._encoding, **map)
+
+        def footer(**map):
+            yield tmpl("footer", **map)
 
-        try:
-            virtual = req.env.get("PATH_INFO", "").strip('/')
-            if virtual.startswith('static/'):
-                static = os.path.join(templater.templatepath(), 'static')
-                fname = virtual[7:]
-                req.write(staticfile(static, fname, req) or
-                          tmpl('error', error='%r not found' % fname))
-            elif virtual:
-                repos = dict(self.repos)
-                while virtual:
-                    real = repos.get(virtual)
-                    if real:
-                        req.env['REPO_NAME'] = virtual
-                        try:
-                            repo = hg.repository(parentui, real)
-                            hgweb(repo).run_wsgi(req)
-                        except IOError, inst:
-                            req.write(tmpl("error", error=inst.strerror))
-                        except hg.RepoError, inst:
-                            req.write(tmpl("error", error=str(inst)))
-                        return
+        def motd(**map):
+            if self.motd is not None:
+                yield self.motd
+            else:
+                yield config('web', 'motd', '')
+
+        def config(section, name, default=None, untrusted=True):
+            return self.parentui.config(section, name, default, untrusted)
+
+        url = req.env.get('SCRIPT_NAME', '')
+        if not url.endswith('/'):
+            url += '/'
 
-                    # browse subdirectories
-                    subdir = virtual + '/'
-                    if [r for r in repos if r.startswith(subdir)]:
-                        makeindex(req, subdir)
-                        return
-
-                    up = virtual.rfind('/')
-                    if up < 0:
-                        break
-                    virtual = virtual[:up]
+        staticurl = config('web', 'staticurl') or url + 'static/'
+        if not staticurl.endswith('/'):
+            staticurl += '/'
 
-                req.write(tmpl("notfound", repo=virtual))
-            else:
-                if req.form.has_key('static'):
-                    static = os.path.join(templater.templatepath(), "static")
-                    fname = req.form['static'][0]
-                    req.write(staticfile(static, fname, req)
-                              or tmpl("error", error="%r not found" % fname))
-                else:
-                    makeindex(req)
-        finally:
-            tmpl = None
+        style = self.style
+        if style is None:
+            style = config('web', 'style', '')
+        if 'style' in req.form:
+            style = req.form['style'][0]
+        if self.stripecount is None:
+            self.stripecount = int(config('web', 'stripes', 1))
+        mapfile = style_map(templater.templatepath(), style)
+        tmpl = templater.templater(mapfile, templatefilters.filters,
+                                   defaults={"header": header,
+                                             "footer": footer,
+                                             "motd": motd,
+                                             "url": url,
+                                             "staticurl": staticurl})
+        return tmpl
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/hgweb/protocol.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,253 @@
+#
+# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
+# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import cStringIO, zlib, bz2, tempfile, errno, os, sys
+from mercurial import util, streamclone
+from mercurial.i18n import gettext as _
+from mercurial.node import *
+from common import HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
+
+# __all__ is populated with the allowed commands. Be sure to add to it if
+# you're adding a new command, or the new command won't work.
+
+__all__ = [
+   'lookup', 'heads', 'branches', 'between', 'changegroup',
+   'changegroupsubset', 'capabilities', 'unbundle', 'stream_out',
+]
+
+HGTYPE = 'application/mercurial-0.1'
+
+def lookup(web, req):
+    try:
+        r = hex(web.repo.lookup(req.form['key'][0]))
+        success = 1
+    except Exception,inst:
+        r = str(inst)
+        success = 0
+    resp = "%s %s\n" % (success, r)
+    req.respond(HTTP_OK, HGTYPE, length=len(resp))
+    req.write(resp)
+
+def heads(web, req):
+    resp = " ".join(map(hex, web.repo.heads())) + "\n"
+    req.respond(HTTP_OK, HGTYPE, length=len(resp))
+    req.write(resp)
+
+def branches(web, req):
+    nodes = []
+    if 'nodes' in req.form:
+        nodes = map(bin, req.form['nodes'][0].split(" "))
+    resp = cStringIO.StringIO()
+    for b in web.repo.branches(nodes):
+        resp.write(" ".join(map(hex, b)) + "\n")
+    resp = resp.getvalue()
+    req.respond(HTTP_OK, HGTYPE, length=len(resp))
+    req.write(resp)
+
+def between(web, req):
+    pairs = []
+    if 'pairs' in req.form:
+        pairs = [map(bin, p.split("-"))
+                 for p in req.form['pairs'][0].split(" ")]
+    resp = cStringIO.StringIO()
+    for b in web.repo.between(pairs):
+        resp.write(" ".join(map(hex, b)) + "\n")
+    resp = resp.getvalue()
+    req.respond(HTTP_OK, HGTYPE, length=len(resp))
+    req.write(resp)
+
+def changegroup(web, req):
+    req.respond(HTTP_OK, HGTYPE)
+    nodes = []
+    if not web.allowpull:
+        return
+
+    if 'roots' in req.form:
+        nodes = map(bin, req.form['roots'][0].split(" "))
+
+    z = zlib.compressobj()
+    f = web.repo.changegroup(nodes, 'serve')
+    while 1:
+        chunk = f.read(4096)
+        if not chunk:
+            break
+        req.write(z.compress(chunk))
+
+    req.write(z.flush())
+
+def changegroupsubset(web, req):
+    req.respond(HTTP_OK, HGTYPE)
+    bases = []
+    heads = []
+    if not web.allowpull:
+        return
+
+    if 'bases' in req.form:
+        bases = [bin(x) for x in req.form['bases'][0].split(' ')]
+    if 'heads' in req.form:
+        heads = [bin(x) for x in req.form['heads'][0].split(' ')]
+
+    z = zlib.compressobj()
+    f = web.repo.changegroupsubset(bases, heads, 'serve')
+    while 1:
+        chunk = f.read(4096)
+        if not chunk:
+            break
+        req.write(z.compress(chunk))
+
+    req.write(z.flush())
+
+def capabilities(web, req):
+    caps = ['lookup', 'changegroupsubset']
+    if web.configbool('server', 'uncompressed'):
+        caps.append('stream=%d' % web.repo.changelog.version)
+    # XXX: make configurable and/or share code with unbundle:
+    unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
+    if unbundleversions:
+        caps.append('unbundle=%s' % ','.join(unbundleversions))
+    resp = ' '.join(caps)
+    req.respond(HTTP_OK, HGTYPE, length=len(resp))
+    req.write(resp)
+
+def unbundle(web, req):
+    def bail(response, headers={}):
+        length = int(req.env['CONTENT_LENGTH'])
+        for s in util.filechunkiter(req, limit=length):
+            # drain incoming bundle, else client will not see
+            # response when run outside cgi script
+            pass
+        req.header(headers.items())
+        req.respond(HTTP_OK, HGTYPE)
+        req.write('0\n')
+        req.write(response)
+
+    # require ssl by default so that auth info cannot be sniffed
+    # and replayed
+    ssl_req = web.configbool('web', 'push_ssl', True)
+    if ssl_req:
+        if req.env.get('wsgi.url_scheme') != 'https':
+            bail(_('ssl required\n'))
+            return
+        proto = 'https'
+    else:
+        proto = 'http'
+
+    # do not allow push unless explicitly allowed
+    if not web.check_perm(req, 'push', False):
+        bail(_('push not authorized\n'),
+             headers={'status': '401 Unauthorized'})
+        return
+
+    their_heads = req.form['heads'][0].split(' ')
+
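+    # the client sends the heads it knows about; refuse the push if our
+    # heads have changed in the meantime, unless the client asks to force it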
+    def check_heads():
+        heads = map(hex, web.repo.heads())
+        return their_heads == [hex('force')] or their_heads == heads
+
+    # fail early if possible
+    if not check_heads():
+        bail(_('unsynced changes\n'))
+        return
+
+    req.respond(HTTP_OK, HGTYPE)
+
+    # do not lock repo until all changegroup data is
+    # streamed. save to temporary file.
+
+    fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
+    fp = os.fdopen(fd, 'wb+')
+    try:
+        length = int(req.env['CONTENT_LENGTH'])
+        for s in util.filechunkiter(req, limit=length):
+            fp.write(s)
+
+        try:
+            lock = web.repo.lock()
+            try:
+                if not check_heads():
+                    req.write('0\n')
+                    req.write(_('unsynced changes\n'))
+                    return
+
+                fp.seek(0)
+                header = fp.read(6)
+                if not header.startswith("HG"):
+                    # old client with uncompressed bundle
+                    def generator(f):
+                        yield header
+                        for chunk in f:
+                            yield chunk
+                elif not header.startswith("HG10"):
+                    req.write("0\n")
+                    req.write(_("unknown bundle version\n"))
+                    return
+                elif header == "HG10GZ":
+                    def generator(f):
+                        zd = zlib.decompressobj()
+                        for chunk in f:
+                            yield zd.decompress(chunk)
+                elif header == "HG10BZ":
+                    def generator(f):
+                        zd = bz2.BZ2Decompressor()
+                        zd.decompress("BZ")
+                        for chunk in f:
+                            yield zd.decompress(chunk)
+                elif header == "HG10UN":
+                    def generator(f):
+                        for chunk in f:
+                            yield chunk
+                else:
+                    req.write("0\n")
+                    req.write(_("unknown bundle compression type\n"))
+                    return
+                gen = generator(util.filechunkiter(fp, 4096))
+
+                # send addchangegroup output to client
+
+                old_stdout = sys.stdout
+                sys.stdout = cStringIO.StringIO()
+
+                try:
+                    url = 'remote:%s:%s' % (proto,
+                                            req.env.get('REMOTE_HOST', ''))
+                    try:
+                        ret = web.repo.addchangegroup(
+                                    util.chunkbuffer(gen), 'serve', url)
+                    except util.Abort, inst:
+                        sys.stdout.write("abort: %s\n" % inst)
+                        ret = 0
+                finally:
+                    val = sys.stdout.getvalue()
+                    sys.stdout = old_stdout
+                req.write('%d\n' % ret)
+                req.write(val)
+            finally:
+                del lock
+        except (OSError, IOError), inst:
+            req.write('0\n')
+            filename = getattr(inst, 'filename', '')
+            # Don't send our filesystem layout to the client
+            if filename.startswith(web.repo.root):
+                filename = filename[len(web.repo.root)+1:]
+            else:
+                filename = ''
+            error = getattr(inst, 'strerror', 'Unknown error')
+            if inst.errno == errno.ENOENT:
+                code = HTTP_NOT_FOUND
+            else:
+                code = HTTP_SERVER_ERROR
+            req.respond(code)
+            req.write('%s: %s\n' % (error, filename))
+    finally:
+        fp.close()
+        os.unlink(tempname)
+
+def stream_out(web, req):
+    req.respond(HTTP_OK, HGTYPE)
+    streamclone.stream_out(web.repo, req, untrusted=True)
--- a/mercurial/hgweb/request.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/hgweb/request.py	Wed Feb 06 19:57:52 2008 -0800
@@ -8,33 +8,24 @@
 
 import socket, cgi, errno
 from mercurial.i18n import gettext as _
-
-class wsgiapplication(object):
-    def __init__(self, destmaker):
-        self.destmaker = destmaker
+from common import ErrorResponse, statusmessage
 
-    def __call__(self, wsgienv, start_response):
-        return _wsgirequest(self.destmaker(), wsgienv, start_response)
-
-class _wsgirequest(object):
-    def __init__(self, destination, wsgienv, start_response):
+class wsgirequest(object):
+    def __init__(self, wsgienv, start_response):
         version = wsgienv['wsgi.version']
         if (version < (1, 0)) or (version >= (2, 0)):
             raise RuntimeError("Unknown and unsupported WSGI version %d.%d"
                                % version)
         self.inp = wsgienv['wsgi.input']
-        self.server_write = None
         self.err = wsgienv['wsgi.errors']
         self.threaded = wsgienv['wsgi.multithread']
         self.multiprocess = wsgienv['wsgi.multiprocess']
         self.run_once = wsgienv['wsgi.run_once']
         self.env = wsgienv
         self.form = cgi.parse(self.inp, self.env, keep_blank_values=1)
-        self.start_response = start_response
+        self._start_response = start_response
+        self.server_write = None
         self.headers = []
-        destination.run_wsgi(self)
-
-    out = property(lambda self: self)
 
     def __iter__(self):
         return iter([])
@@ -42,25 +33,39 @@
     def read(self, count=-1):
         return self.inp.read(count)
 
-    def write(self, *things):
-        for thing in things:
-            if hasattr(thing, "__iter__"):
-                for part in thing:
-                    self.write(part)
-            else:
-                thing = str(thing)
-                if self.server_write is None:
-                    if not self.headers:
-                        raise RuntimeError("request.write called before headers sent (%s)." % thing)
-                    self.server_write = self.start_response('200 Script output follows',
-                                                            self.headers)
-                    self.start_response = None
-                    self.headers = None
-                try:
-                    self.server_write(thing)
-                except socket.error, inst:
-                    if inst[0] != errno.ECONNRESET:
-                        raise
+    def respond(self, status, type=None, filename=None, length=0):
+        if self._start_response is not None:
+
+            self.httphdr(type, filename, length)
+            if not self.headers:
+                raise RuntimeError("request.write called before headers sent")
+
+            for k, v in self.headers:
+                if not isinstance(v, str):
+                    raise TypeError('header value must be string: %r' % v)
+
+            if isinstance(status, ErrorResponse):
+                status = statusmessage(status.code)
+            elif status == 200:
+                status = '200 Script output follows'
+            elif isinstance(status, int):
+                status = statusmessage(status)
+
+            self.server_write = self._start_response(status, self.headers)
+            self._start_response = None
+            self.headers = []
+
+    def write(self, thing):
+        if hasattr(thing, "__iter__"):
+            for part in thing:
+                self.write(part)
+        else:
+            thing = str(thing)
+            try:
+                self.server_write(thing)
+            except socket.error, inst:
+                if inst[0] != errno.ECONNRESET:
+                    raise
 
     def writelines(self, lines):
         for line in lines:
@@ -72,15 +77,24 @@
     def close(self):
         return None
 
-    def header(self, headers=[('Content-type','text/html')]):
+    def header(self, headers=[('Content-Type','text/html')]):
         self.headers.extend(headers)
 
-    def httphdr(self, type, filename=None, length=0, headers={}):
+    def httphdr(self, type=None, filename=None, length=0, headers={}):
         headers = headers.items()
-        headers.append(('Content-type', type))
+        if type is not None:
+            headers.append(('Content-Type', type))
         if filename:
-            headers.append(('Content-disposition', 'attachment; filename=%s' %
+            headers.append(('Content-Disposition', 'inline; filename=%s' %
                             filename))
         if length:
-            headers.append(('Content-length', str(length)))
+            headers.append(('Content-Length', str(length)))
         self.header(headers)
+
+def wsgiapplication(app_maker):
+    '''For compatibility with old CGI scripts. A plain hgweb() or hgwebdir()
+    can and should now be used as a WSGI application.'''
+    application = app_maker()
+    def run_wsgi(env, respond):
+        return application(env, respond)
+    return run_wsgi
--- a/mercurial/hgweb/server.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/hgweb/server.py	Wed Feb 06 19:57:52 2008 -0800
@@ -10,7 +10,6 @@
 from mercurial import ui, hg, util, templater
 from hgweb_mod import hgweb
 from hgwebdir_mod import hgwebdir
-from request import wsgiapplication
 from mercurial.i18n import gettext as _
 
 def _splitURI(uri):
@@ -44,17 +43,17 @@
         self.protocol_version = 'HTTP/1.1'
         BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs)
 
+    def _log_any(self, fp, format, *args):
+        fp.write("%s - - [%s] %s\n" % (self.client_address[0],
+                                       self.log_date_time_string(),
+                                       format % args))
+        fp.flush()
+
     def log_error(self, format, *args):
-        errorlog = self.server.errorlog
-        errorlog.write("%s - - [%s] %s\n" % (self.client_address[0],
-                                             self.log_date_time_string(),
-                                             format % args))
+        self._log_any(self.server.errorlog, format, *args)
 
     def log_message(self, format, *args):
-        accesslog = self.server.accesslog
-        accesslog.write("%s - - [%s] %s\n" % (self.client_address[0],
-                                              self.log_date_time_string(),
-                                              format % args))
+        self._log_any(self.server.accesslog, format, *args)
 
     def do_write(self):
         try:
@@ -77,7 +76,7 @@
         self.do_POST()
 
     def do_hgweb(self):
-        path_info, query = _splitURI(self.path)
+        path, query = _splitURI(self.path)
 
         env = {}
         env['GATEWAY_INTERFACE'] = 'CGI/1.1'
@@ -85,7 +84,8 @@
         env['SERVER_NAME'] = self.server.server_name
         env['SERVER_PORT'] = str(self.server.server_port)
         env['REQUEST_URI'] = self.path
-        env['PATH_INFO'] = path_info
+        env['SCRIPT_NAME'] = self.server.prefix
+        env['PATH_INFO'] = path[len(self.server.prefix):]
         env['REMOTE_HOST'] = self.client_address[0]
         env['REMOTE_ADDR'] = self.client_address[0]
         if query:
@@ -121,10 +121,7 @@
         self.saved_headers = []
         self.sent_headers = False
         self.length = None
-        req = self.server.reqmaker(env, self._start_response)
-        for data in req:
-            if data:
-                self._write(data)
+        self.server.application(env, self._start_response)
 
     def send_headers(self):
         if not self.saved_status:
@@ -200,16 +197,23 @@
 
     def openlog(opt, default):
         if opt and opt != '-':
-            return open(opt, 'w')
+            return open(opt, 'a')
         return default
 
-    address = repo.ui.config("web", "address", "")
-    port = int(repo.ui.config("web", "port", 8000))
-    use_ipv6 = repo.ui.configbool("web", "ipv6")
-    webdir_conf = repo.ui.config("web", "webdir_conf")
-    ssl_cert = repo.ui.config("web", "certificate")
-    accesslog = openlog(repo.ui.config("web", "accesslog", "-"), sys.stdout)
-    errorlog = openlog(repo.ui.config("web", "errorlog", "-"), sys.stderr)
+    if repo is None:
+        myui = ui
+    else:
+        myui = repo.ui
+    address = myui.config("web", "address", "")
+    port = int(myui.config("web", "port", 8000))
+    prefix = myui.config("web", "prefix", "")
+    if prefix:
+        prefix = "/" + prefix.strip("/")
+    use_ipv6 = myui.configbool("web", "ipv6")
+    webdir_conf = myui.config("web", "webdir_conf")
+    ssl_cert = myui.config("web", "certificate")
+    accesslog = openlog(myui.config("web", "accesslog", "-"), sys.stdout)
+    errorlog = openlog(myui.config("web", "errorlog", "-"), sys.stderr)
 
     if use_threads:
         try:
@@ -246,13 +250,14 @@
                     raise hg.RepoError(_("There is no Mercurial repository here"
                                          " (.hg not found)"))
                 return hgwebobj
-            self.reqmaker = wsgiapplication(make_handler)
+            self.application = make_handler()
 
             addr = address
             if addr in ('', '::'):
                 addr = socket.gethostname()
 
             self.addr, self.port = addr, port
+            self.prefix = prefix
 
             if ssl_cert:
                 try:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/hgweb/webcommands.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,124 @@
+#
+# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
+# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import os, mimetypes
+from mercurial import revlog, util, hg
+from common import staticfile, ErrorResponse, HTTP_OK, HTTP_NOT_FOUND
+
+# __all__ is populated with the allowed commands. Be sure to add to it if
+# you're adding a new command, or the new command won't work.
+
+__all__ = [
+   'log', 'rawfile', 'file', 'changelog', 'shortlog', 'changeset', 'rev',
+   'manifest', 'tags', 'summary', 'filediff', 'diff', 'annotate', 'filelog',
+   'archive', 'static',
+]
+
+def log(web, req, tmpl):
+    if 'file' in req.form and req.form['file'][0]:
+        return filelog(web, req, tmpl)
+    else:
+        return changelog(web, req, tmpl)
+
+def rawfile(web, req, tmpl):
+    path = web.cleanpath(req.form.get('file', [''])[0])
+    if not path:
+        content = web.manifest(tmpl, web.changectx(req), path)
+        req.respond(HTTP_OK, web.ctype)
+        return content
+
+    try:
+        fctx = web.filectx(req)
+    except revlog.LookupError:
+        content = web.manifest(tmpl, web.changectx(req), path)
+        req.respond(HTTP_OK, web.ctype)
+        return content
+
+    path = fctx.path()
+    text = fctx.data()
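+    # guess the content type from the file name; content with no known
+    # type is served as application/octet-stream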
+    mt = mimetypes.guess_type(path)[0]
+    if mt is None or util.binary(text):
+        mt = mt or 'application/octet-stream'
+
+    req.respond(HTTP_OK, mt, path, len(text))
+    return [text]
+
+def file(web, req, tmpl):
+    path = web.cleanpath(req.form.get('file', [''])[0])
+    if path:
+        try:
+            return web.filerevision(tmpl, web.filectx(req))
+        except revlog.LookupError:
+            pass
+
+    return web.manifest(tmpl, web.changectx(req), path)
+
+def changelog(web, req, tmpl, shortlog = False):
+    if 'node' in req.form:
+        ctx = web.changectx(req)
+    else:
+        if 'rev' in req.form:
+            hi = req.form['rev'][0]
+        else:
+            hi = web.repo.changelog.count() - 1
+        try:
+            ctx = web.repo.changectx(hi)
+        except hg.RepoError:
+            return web.search(tmpl, hi) # XXX redirect to 404 page?
+
+    return web.changelog(tmpl, ctx, shortlog = shortlog)
+
+def shortlog(web, req, tmpl):
+    return changelog(web, req, tmpl, shortlog = True)
+
+def changeset(web, req, tmpl):
+    return web.changeset(tmpl, web.changectx(req))
+
+rev = changeset
+
+def manifest(web, req, tmpl):
+    return web.manifest(tmpl, web.changectx(req),
+                        web.cleanpath(req.form['path'][0]))
+
+def tags(web, req, tmpl):
+    return web.tags(tmpl)
+
+def summary(web, req, tmpl):
+    return web.summary(tmpl)
+
+def filediff(web, req, tmpl):
+    return web.filediff(tmpl, web.filectx(req))
+
+diff = filediff
+
+def annotate(web, req, tmpl):
+    return web.fileannotate(tmpl, web.filectx(req))
+
+def filelog(web, req, tmpl):
+    return web.filelog(tmpl, web.filectx(req))
+
+def archive(web, req, tmpl):
+    type_ = req.form['type'][0]
+    allowed = web.configlist("web", "allow_archive")
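+    # only honour the request if this archive type is enabled, either via
+    # web.allow_archive or the per-type web.allow<type> option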
+    if (type_ in web.archives and (type_ in allowed or
+        web.configbool("web", "allow" + type_, False))):
+        web.archive(tmpl, req, req.form['node'][0], type_)
+        return []
+    raise ErrorResponse(HTTP_NOT_FOUND, 'Unsupported archive type: %s' % type_)
+
+def static(web, req, tmpl):
+    fname = req.form['file'][0]
+    # a repo owner may set web.static in .hg/hgrc to get any file
+    # readable by the user running the CGI script
+    static = web.config("web", "static",
+                        os.path.join(web.templatepath, "static"),
+                        untrusted=False)
+    return [staticfile(static, fname, req)]
--- a/mercurial/hgweb/wsgicgi.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/hgweb/wsgicgi.py	Wed Feb 06 19:57:52 2008 -0800
@@ -16,6 +16,7 @@
     util.set_binary(sys.stdout)
 
     environ = dict(os.environ.items())
+    environ.setdefault('PATH_INFO', '')
     environ['wsgi.input'] = sys.stdin
     environ['wsgi.errors'] = sys.stderr
     environ['wsgi.version'] = (1, 0)
@@ -61,13 +62,4 @@
         headers_set[:] = [status, response_headers]
         return write
 
-    result = application(environ, start_response)
-    try:
-        for data in result:
-            if data:    # don't send headers until body appears
-                write(data)
-        if not headers_sent:
-            write('')   # send headers now if body was empty
-    finally:
-        if hasattr(result,'close'):
-            result.close()
+    application(environ, start_response)
--- a/mercurial/hook.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/hook.py	Wed Feb 06 19:57:52 2008 -0800
@@ -6,7 +6,7 @@
 # of the GNU General Public License, incorporated herein by reference.
 
 from i18n import _
-import util
+import util, os, sys
 
 def _pythonhook(ui, repo, name, hname, funcname, args, throw):
     '''call python hook. hook is callable object, looked up as
@@ -71,7 +71,11 @@
 def _exthook(ui, repo, name, cmd, args, throw):
     ui.note(_("running hook %s: %s\n") % (name, cmd))
     env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
-    r = util.system(cmd, environ=env, cwd=repo.root)
+    if repo:
+        cwd = repo.root
+    else:
+        cwd = os.getcwd()
+    r = util.system(cmd, environ=env, cwd=cwd)
     if r:
         desc, r = util.explain_exit(r)
         if throw:
@@ -79,8 +83,21 @@
         ui.warn(_('warning: %s hook %s\n') % (name, desc))
     return r
 
+_redirect = False
+def redirect(state):
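+    # when set, hook() temporarily sends the hooks' stdout to stderr while
+    # they run, keeping stdout clean for callers that use it for output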
+    global _redirect
+    _redirect = state
+
 def hook(ui, repo, name, throw=False, **args):
     r = False
+
+    if _redirect:
+        # temporarily redirect stdout to stderr
+        oldstdout = os.dup(sys.stdout.fileno())
+        os.dup2(sys.stderr.fileno(), sys.stdout.fileno())
+
     hooks = [(hname, cmd) for hname, cmd in ui.configitems("hooks")
              if hname.split(".", 1)[0] == name and cmd]
     hooks.sort()
@@ -94,3 +111,6 @@
             r = _exthook(ui, repo, hname, cmd, args, throw) or r
-    return r
 
+    if _redirect:
+        os.dup2(oldstdout, sys.stdout.fileno())
+        os.close(oldstdout)
+    return r
--- a/mercurial/httprepo.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/httprepo.py	Wed Feb 06 19:57:52 2008 -0800
@@ -9,7 +9,7 @@
 from node import *
 from remoterepo import *
 from i18n import _
-import hg, os, urllib, urllib2, urlparse, zlib, util, httplib
+import repo, os, urllib, urllib2, urlparse, zlib, util, httplib
 import errno, keepalive, tempfile, socket, changegroup
 
 class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
@@ -103,10 +103,13 @@
     # must be able to send big bundle as stream.
     send = _gen_sendfile(keepalive.HTTPConnection)
 
-class basehttphandler(keepalive.HTTPHandler):
+class httphandler(keepalive.HTTPHandler):
     def http_open(self, req):
         return self.do_open(httpconnection, req)
 
+    def __del__(self):
+        self.close_all()
+
 has_https = hasattr(urllib2, 'HTTPSHandler')
 if has_https:
     class httpsconnection(httplib.HTTPSConnection):
@@ -114,12 +117,9 @@
         # must be able to send big bundle as stream.
         send = _gen_sendfile(httplib.HTTPSConnection)
 
-    class httphandler(basehttphandler, urllib2.HTTPSHandler):
+    class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler):
         def https_open(self, req):
             return self.do_open(httpsconnection, req)
-else:
-    class httphandler(basehttphandler):
-        pass
 
 # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
 # it doesn't know about the auth type requested.  This can happen if
@@ -144,6 +144,43 @@
         raise IOError(None, _('connection ended unexpectedly'))
     yield zd.flush()
 
+_safe = ('abcdefghijklmnopqrstuvwxyz'
+         'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+         '0123456789' '_.-/')
+_safeset = None
+_hex = None
+def quotepath(path):
+    '''quote the path part of a URL
+
+    This is similar to urllib.quote, but it also tries to avoid
+    quoting things twice (inspired by wget):
+
+    >>> quotepath('abc def')
+    'abc%20def'
+    >>> quotepath('abc%20def')
+    'abc%20def'
+    >>> quotepath('abc%20 def')
+    'abc%20%20def'
+    >>> quotepath('abc def%20')
+    'abc%20def%20'
+    >>> quotepath('abc def%2')
+    'abc%20def%252'
+    >>> quotepath('abc def%')
+    'abc%20def%25'
+    '''
+    global _safeset, _hex
+    if _safeset is None:
+        _safeset = util.set(_safe)
+        _hex = util.set('abcdefABCDEF0123456789')
+    l = list(path)
+    for i in xrange(len(l)):
+        c = l[i]
+        if c == '%' and i + 2 < len(l) and (l[i+1] in _hex and l[i+2] in _hex):
+            pass
+        elif c not in _safeset:
+            l[i] = '%%%02X' % ord(c)
+    return ''.join(l)
+
 class httprepository(remoterepository):
     def __init__(self, ui, path):
         self.path = path
@@ -153,18 +190,22 @@
         if query or frag:
             raise util.Abort(_('unsupported URL component: "%s"') %
                              (query or frag))
-        if not urlpath: urlpath = '/'
+        if not urlpath:
+            urlpath = '/'
+        urlpath = quotepath(urlpath)
         host, port, user, passwd = netlocsplit(netloc)
 
         # urllib cannot handle URLs with embedded user or passwd
         self._url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
                                          urlpath, '', ''))
         self.ui = ui
+        self.ui.debug(_('using %s\n') % self._url)
 
         proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
         # XXX proxyauthinfo = None
-        self.handler = httphandler()
-        handlers = [self.handler]
+        handlers = [httphandler()]
+        if has_https:
+            handlers.append(httpshandler())
 
         if proxyurl:
             # proxy can be proper url or host[:port]
@@ -190,6 +231,9 @@
             # "http_proxy.always" config is for running tests on localhost
             if (not ui.configbool("http_proxy", "always") and
                 host.lower() in no_list):
+                # avoid auto-detection of proxy settings by appending
+                # a ProxyHandler with no proxies defined.
+                handlers.append(urllib2.ProxyHandler({}))
                 ui.debug(_('disabling proxy for %s\n') % host)
             else:
                 proxyurl = urlparse.urlunsplit((
@@ -204,7 +248,7 @@
         # will take precedence if found, so drop them
         for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
             try:
-                if os.environ.has_key(env):
+                if env in os.environ:
                     del os.environ[env]
             except OSError:
                 pass
@@ -213,7 +257,11 @@
         if user:
             ui.debug(_('http auth: user %s, password %s\n') %
                      (user, passwd and '*' * len(passwd) or 'not set'))
-            passmgr.add_password(None, host, user, passwd or '')
+            netloc = host
+            if port:
+                netloc += ':' + port
+            # Python < 2.4.3 uses only the netloc to search for a password
+            passmgr.add_password(None, (self._url, netloc), user, passwd or '')
 
         handlers.extend((urllib2.HTTPBasicAuthHandler(passmgr),
                          httpdigestauthhandler(passmgr)))
@@ -223,11 +271,6 @@
         opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
         urllib2.install_opener(opener)
 
-    def __del__(self):
-        if self.handler:
-            self.handler.close_all()
-            self.handler = None
-
     def url(self):
         return self.path
 
@@ -236,9 +279,9 @@
     def get_caps(self):
         if self.caps is None:
             try:
-                self.caps = self.do_read('capabilities').split()
-            except hg.RepoError:
-                self.caps = ()
+                self.caps = util.set(self.do_read('capabilities').split())
+            except repo.RepoError:
+                self.caps = util.set()
             self.ui.debug(_('capabilities: %s\n') %
                           (' '.join(self.caps or ['none'])))
         return self.caps
@@ -258,8 +301,7 @@
         cu = "%s%s" % (self._url, qs)
         try:
             if data:
-                self.ui.debug(_("sending %s bytes\n") %
-                              headers.get('content-length', 'X'))
+                self.ui.debug(_("sending %s bytes\n") % len(data))
             resp = urllib2.urlopen(request(cu, data, headers))
         except urllib2.HTTPError, inst:
             if inst.code == 401:
@@ -289,7 +331,7 @@
                 proto.startswith('text/plain') or
                 proto.startswith('application/hg-changegroup')):
             self.ui.debug(_("Requested URL: '%s'\n") % cu)
-            raise hg.RepoError(_("'%s' does not appear to be an hg repository")
+            raise repo.RepoError(_("'%s' does not appear to be an hg repository")
                                % self._url)
 
         if proto.startswith('application/mercurial-'):
@@ -297,10 +339,10 @@
                 version = proto.split('-', 1)[1]
                 version_info = tuple([int(n) for n in version.split('.')])
             except ValueError:
-                raise hg.RepoError(_("'%s' sent a broken Content-type "
+                raise repo.RepoError(_("'%s' sent a broken Content-Type "
                                      "header (%s)") % (self._url, proto))
             if version_info > (0, 1):
-                raise hg.RepoError(_("'%s' uses newer protocol %s") %
+                raise repo.RepoError(_("'%s' uses newer protocol %s") %
                                    (self._url, version))
 
         return resp
@@ -314,11 +356,12 @@
             fp.close()
 
     def lookup(self, key):
+        self.requirecap('lookup', _('look up remote revision'))
         d = self.do_cmd("lookup", key = key).read()
         success, data = d[:-1].split(' ', 1)
         if int(success):
             return bin(data)
-        raise hg.RepoError(data)
+        raise repo.RepoError(data)
 
     def heads(self):
         d = self.do_read("heads")
@@ -351,6 +394,7 @@
         return util.chunkbuffer(zgenerator(f))
 
     def changegroupsubset(self, bases, heads, source):
+        self.requirecap('changegroupsubset', _('look up remote changes'))
         baselst = " ".join([hex(n) for n in bases])
         headlst = " ".join([hex(n) for n in heads])
         f = self.do_cmd("changegroupsubset", bases=baselst, heads=headlst)
@@ -380,7 +424,7 @@
             try:
                 rfp = self.do_cmd(
                     'unbundle', data=fp,
-                    headers={'content-type': 'application/octet-stream'},
+                    headers={'Content-Type': 'application/octet-stream'},
                     heads=' '.join(map(hex, heads)))
                 try:
                     ret = int(rfp.readline())
--- a/mercurial/ignore.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/ignore.py	Wed Feb 06 19:57:52 2008 -0800
@@ -6,18 +6,21 @@
 # of the GNU General Public License, incorporated herein by reference.
 
 from i18n import _
-import util
+import util, re
+
+_commentre = None
 
 def _parselines(fp):
     for line in fp:
-        if not line.endswith('\n'):
-            line += '\n'
-        escape = False
-        for i in xrange(len(line)):
-            if escape: escape = False
-            elif line[i] == '\\': escape = True
-            elif line[i] == '#': break
-        line = line[:i].rstrip()
+        if "#" in line:
+            global _commentre
+            if not _commentre:
+                _commentre = re.compile(r'((^|[^\\])(\\\\)*)#.*')
+            # remove comments prefixed by an even number of escapes
+            line = _commentre.sub(r'\1', line)
+            # fixup properly escaped comments that survived the above
+            line = line.replace("\\#", "#")
+        line = line.rstrip()
         if line:
             yield line
 
@@ -57,10 +60,13 @@
                         warn(_("%s: ignoring invalid syntax '%s'\n") % (f, s))
                     continue
                 pat = syntax + line
-                for s in syntaxes.values():
-                    if line.startswith(s):
+                for s, rels in syntaxes.items():
+                    if line.startswith(rels):
                         pat = line
                         break
+                    elif line.startswith(s+':'):
+                        pat = rels + line[len(s)+1:]
+                        break
                 pats[f].append(pat)
         except IOError, inst:
             if f != files[0]:
@@ -82,9 +88,3 @@
                 util.matcher(root, inc=patlist, src=f))
 
     return ignorefunc
-
-
-    '''default match function used by dirstate and
-    localrepository.  this honours the repository .hgignore file
-    and any other files specified in the [ui] section of .hgrc.'''
-
--- a/mercurial/keepalive.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/keepalive.py	Wed Feb 06 19:57:52 2008 -0800
@@ -129,7 +129,7 @@
     def add(self, host, connection, ready):
         self._lock.acquire()
         try:
-            if not self._hostmap.has_key(host): self._hostmap[host] = []
+            if not host in self._hostmap: self._hostmap[host] = []
             self._hostmap[host].append(connection)
             self._connmap[connection] = host
             self._readymap[connection] = ready
@@ -159,7 +159,7 @@
         conn = None
         self._lock.acquire()
         try:
-            if self._hostmap.has_key(host):
+            if host in self._hostmap:
                 for c in self._hostmap[host]:
                     if self._readymap[c]:
                         self._readymap[c] = 0
@@ -175,7 +175,7 @@
         else:
             return dict(self._hostmap)
 
-class HTTPHandler(urllib2.HTTPHandler):
+class KeepAliveHandler:
     def __init__(self):
         self._cm = ConnectionManager()
 
@@ -314,6 +314,9 @@
         except socket.error, err: # XXX what error?
             raise urllib2.URLError(err)
 
+class HTTPHandler(KeepAliveHandler, urllib2.HTTPHandler):
+    pass
+
 class HTTPResponse(httplib.HTTPResponse):
     # we need to subclass HTTPResponse in order to
     # 1) add readline() and readlines() methods
--- a/mercurial/localrepo.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/localrepo.py	Wed Feb 06 19:57:52 2008 -0800
@@ -9,16 +9,15 @@
 from i18n import _
 import repo, changegroup
 import changelog, dirstate, filelog, manifest, context, weakref
-import re, lock, transaction, tempfile, stat, mdiff, errno, ui
-import os, revlog, time, util, extensions, hook
+import re, lock, transaction, tempfile, stat, errno, ui
+import os, revlog, time, util, extensions, hook, inspect
 
 class localrepository(repo.repository):
-    capabilities = ('lookup', 'changegroupsubset')
+    capabilities = util.set(('lookup', 'changegroupsubset'))
     supported = ('revlogv1', 'store')
 
     def __init__(self, parentui, path=None, create=0):
         repo.repository.__init__(self)
-        self.path = path
         self.root = os.path.realpath(path)
         self.path = os.path.join(self.root, ".hg")
         self.origroot = path
@@ -69,7 +68,8 @@
             self.encodefn = lambda x: x
             self.decodefn = lambda x: x
             self.spath = self.path
-        self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
+        self.sopener = util.encodedopener(util.opener(self.spath),
+                                          self.encodefn)
 
         self.ui = ui.ui(parentui=parentui)
         try:
@@ -79,9 +79,11 @@
             pass
 
         self.tagscache = None
+        self._tagstypecache = None
         self.branchcache = None
         self.nodetagscache = None
         self.filterpats = {}
+        self._datafilters = {}
         self._transref = self._lockref = self._wlockref = None
 
     def __getattr__(self, name):
@@ -118,11 +120,11 @@
         self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
 
         def writetag(fp, name, munge, prevtags):
+            fp.seek(0, 2)
             if prevtags and prevtags[-1] != '\n':
                 fp.write('\n')
             fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
             fp.close()
-            self.hook('tag', node=hex(node), tag=name, local=local)
 
         prevtags = ''
         if local:
@@ -135,6 +137,7 @@
 
             # local tags are stored in the current charset
             writetag(fp, name, None, prevtags)
+            self.hook('tag', node=hex(node), tag=name, local=local)
             return
 
         if use_dirstate:
@@ -150,6 +153,8 @@
             except revlog.LookupError:
                 pass
             fp = self.wfile('.hgtags', 'wb')
+            if prevtags:
+                fp.write(prevtags)
 
         # committed tags are stored in UTF-8
         writetag(fp, name, util.fromlocal, prevtags)
@@ -196,8 +201,9 @@
             return self.tagscache
 
         globaltags = {}
+        tagtypes = {}
 
-        def readtags(lines, fn):
+        def readtags(lines, fn, tagtype):
             filetags = {}
             count = 0
 
@@ -232,7 +238,9 @@
             for k, nh in filetags.items():
                 if k not in globaltags:
                     globaltags[k] = nh
+                    tagtypes[k] = tagtype
                     continue
+
                 # we prefer the global tag if:
                 #  it supercedes us OR
                 #  mutual supercedes and it has a higher rank
@@ -244,31 +252,47 @@
                     an = bn
                 ah.extend([n for n in bh if n not in ah])
                 globaltags[k] = an, ah
+                tagtypes[k] = tagtype
 
         # read the tags file from each head, ending with the tip
         f = None
         for rev, node, fnode in self._hgtagsnodes():
             f = (f and f.filectx(fnode) or
                  self.filectx('.hgtags', fileid=fnode))
-            readtags(f.data().splitlines(), f)
+            readtags(f.data().splitlines(), f, "global")
 
         try:
             data = util.fromlocal(self.opener("localtags").read())
             # localtags are stored in the local character set
             # while the internal tag table is stored in UTF-8
-            readtags(data.splitlines(), "localtags")
+            readtags(data.splitlines(), "localtags", "local")
         except IOError:
             pass
 
         self.tagscache = {}
+        self._tagstypecache = {}
         for k,nh in globaltags.items():
             n = nh[0]
             if n != nullid:
                 self.tagscache[k] = n
+                self._tagstypecache[k] = tagtypes[k]
         self.tagscache['tip'] = self.changelog.tip()
 
         return self.tagscache
 
+    def tagtype(self, tagname):
+        '''
+        return the type of the given tag. result can be:
+
+        'local'  : a local tag
+        'global' : a global tag
+        None     : tag does not exist
+        '''
+
+        self.tags()
+
+        return self._tagstypecache.get(tagname)
+
     def _hgtagsnodes(self):
         heads = self.heads()
         heads.reverse()
@@ -463,17 +487,31 @@
             l = []
             for pat, cmd in self.ui.configitems(filter):
                 mf = util.matcher(self.root, "", [pat], [], [])[1]
-                l.append((mf, cmd))
+                fn = None
+                for name, filterfn in self._datafilters.iteritems():
+                    if cmd.startswith(name):
+                        fn = filterfn
+                        break
+                if not fn:
+                    fn = lambda s, c, **kwargs: util.filter(s, c)
+                # Wrap old filters not supporting keyword arguments
+                if not inspect.getargspec(fn)[2]:
+                    oldfn = fn
+                    fn = lambda s, c, oldfn=oldfn, **kwargs: oldfn(s, c)
+                l.append((mf, fn, cmd))
             self.filterpats[filter] = l
 
-        for mf, cmd in self.filterpats[filter]:
+        for mf, fn, cmd in self.filterpats[filter]:
             if mf(filename):
                 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
-                data = util.filter(data, cmd)
+                data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
                 break
 
         return data
 
+    def adddatafilter(self, name, filter):
+        self._datafilters[name] = filter
+
     def wread(self, filename):
         if self._link(filename):
             data = os.readlink(self.wjoin(filename))
@@ -483,16 +521,12 @@
 
     def wwrite(self, filename, data, flags):
         data = self._filter("decode", filename, data)
-        if "l" in flags:
-            self.wopener.symlink(data, filename)
-        else:
-            try:
-                if self._link(filename):
-                    os.unlink(self.wjoin(filename))
-            except OSError:
-                pass
-            self.wopener(filename, 'w').write(data)
-            util.set_exec(self.wjoin(filename), "x" in flags)
+        try:
+            os.unlink(self.wjoin(filename))
+        except OSError:
+            pass
+        self.wopener(filename, 'w').write(data)
+        util.set_flags(self.wjoin(filename), flags)
 
     def wwritedata(self, filename, data):
         return self._filter("decode", filename, data)
@@ -501,15 +535,21 @@
         if self._transref and self._transref():
             return self._transref().nest()
 
+        # abort here if the journal already exists
+        if os.path.exists(self.sjoin("journal")):
+            raise repo.RepoError(_("journal already exists - run hg recover"))
+
         # save dirstate for rollback
         try:
             ds = self.opener("dirstate").read()
         except IOError:
             ds = ""
         self.opener("journal.dirstate", "w").write(ds)
+        self.opener("journal.branch", "w").write(self.dirstate.branch())
 
         renames = [(self.sjoin("journal"), self.sjoin("undo")),
-                   (self.join("journal.dirstate"), self.join("undo.dirstate"))]
+                   (self.join("journal.dirstate"), self.join("undo.dirstate")),
+                   (self.join("journal.branch"), self.join("undo.branch"))]
         tr = transaction.transaction(self.ui.warn, self.sopener,
                                        self.sjoin("journal"),
                                        aftertrans(renames))
@@ -539,18 +579,21 @@
                 self.ui.status(_("rolling back last transaction\n"))
                 transaction.rollback(self.sopener, self.sjoin("undo"))
                 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
+                branch = self.opener("undo.branch").read()
+                self.dirstate.setbranch(branch)
                 self.invalidate()
                 self.dirstate.invalidate()
             else:
                 self.ui.warn(_("no rollback information available\n"))
         finally:
-            del wlock, lock
+            del lock, wlock
 
     def invalidate(self):
         for a in "changelog manifest".split():
             if hasattr(self, a):
                 self.__delattr__(a)
         self.tagscache = None
+        self._tagstypecache = None
         self.nodetagscache = None
 
     def _lock(self, lockname, wait, releasefn, acquirefn, desc):
@@ -627,7 +670,9 @@
             elif fp1 != nullid: # copied on local side, reversed
                 meta["copyrev"] = hex(manifest2.get(cp))
                 fp2 = fp1
-            else: # directory rename
+            elif cp in manifest2: # directory rename on local side
+                meta["copyrev"] = hex(manifest2[cp])
+            else: # directory rename on remote side
                 meta["copyrev"] = hex(manifest1.get(cp, nullid))
             self.ui.debug(_(" %s: copy %s:%s\n") %
                           (fn, cp, meta["copyrev"]))
@@ -641,7 +686,7 @@
                 fp2 = nullid
 
         # is the file unmodified from the parent? report existing entry
-        if fp2 == nullid and not fl.cmp(fp1, t):
+        if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
             return fp1
 
         changelist.append(fn)
@@ -651,12 +696,15 @@
         if p1 is None:
             p1, p2 = self.dirstate.parents()
         return self.commit(files=files, text=text, user=user, date=date,
-                           p1=p1, p2=p2, extra=extra)
+                           p1=p1, p2=p2, extra=extra, empty_ok=True)
 
     def commit(self, files=None, text="", user=None, date=None,
                match=util.always, force=False, force_editor=False,
-               p1=None, p2=None, extra={}):
+               p1=None, p2=None, extra={}, empty_ok=False):
         wlock = lock = tr = None
+        valid = 0 # don't save the dirstate if this isn't set
+        if files:
+            files = util.unique(files)
         try:
             commit = []
             remove = []
@@ -733,7 +781,8 @@
                     new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
                     new_exec = is_exec(f)
                     new_link = is_link(f)
-                    if not changed or changed[-1] != f:
+                    if ((not changed or changed[-1] != f) and
+                        m2.get(f) != new[f]):
                         # mention the file in the changelog if some
                         # flag changed, even if there was no content
                         # change.
@@ -742,6 +791,9 @@
                         if old_exec != new_exec or old_link != new_link:
                             changed.append(f)
                     m1.set(f, new_exec, new_link)
+                    if use_dirstate:
+                        self.dirstate.normal(f)
+
                 except (OSError, IOError):
                     if use_dirstate:
                         self.ui.warn(_("trouble committing %s!\n") % f)
@@ -768,16 +820,19 @@
             new.sort()
 
             user = user or self.ui.username()
-            if not text or force_editor:
+            if (not empty_ok and not text) or force_editor:
                 edittext = []
                 if text:
                     edittext.append(text)
                 edittext.append("")
+                edittext.append(_("HG: Enter commit message."
+                                  "  Lines beginning with 'HG:' are removed."))
+                edittext.append("HG: --")
                 edittext.append("HG: user: %s" % user)
                 if p2 != nullid:
                     edittext.append("HG: branch merge")
                 if branchname:
-                    edittext.append("HG: branch %s" % util.tolocal(branchname))
+                    edittext.append("HG: branch '%s'" % util.tolocal(branchname))
                 edittext.extend(["HG: changed %s" % f for f in changed])
                 edittext.extend(["HG: removed %s" % f for f in removed])
                 if not changed and not remove:
@@ -789,14 +844,17 @@
                 text = self.ui.edit("\n".join(edittext), user)
                 os.chdir(olddir)
 
-            lines = [line.rstrip() for line in text.rstrip().splitlines()]
-            while lines and not lines[0]:
-                del lines[0]
-            if not lines:
-                return None
-            text = '\n'.join(lines)
             if branchname:
                 extra["branch"] = branchname
+
+            if use_dirstate:
+                lines = [line.rstrip() for line in text.rstrip().splitlines()]
+                while lines and not lines[0]:
+                    del lines[0]
+                if not lines:
+                    raise util.Abort(_("empty commit message"))
+                text = '\n'.join(lines)
+
             n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
                                    user, date, extra)
             self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
@@ -809,15 +867,16 @@
             if use_dirstate or update_dirstate:
                 self.dirstate.setparents(n)
                 if use_dirstate:
-                    for f in new:
-                        self.dirstate.normal(f)
                     for f in removed:
                         self.dirstate.forget(f)
+            valid = 1 # our dirstate updates are complete
 
             self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
             return n
         finally:
-            del lock, wlock, tr
+            if not valid: # don't save our updated dirstate
+                self.dirstate.invalidate()
+            del tr, lock, wlock
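
The new valid flag in commit() is easy to miss: it starts at 0, is only set to 1 once every dirstate update has succeeded, and the finally clause discards the in-memory dirstate when it was never set, so a failed commit cannot leave half-applied state behind. The same guard in isolation (the state object and its methods are purely illustrative):

    def guarded_update(state):
        valid = False
        try:
            state.apply_changes()     # any step here may raise
            valid = True              # reached only when every step succeeded
        finally:
            if not valid:
                state.invalidate()    # throw away partial in-memory changes
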
 
     def walk(self, node=None, files=[], match=util.always, badmatch=None):
         '''
@@ -955,7 +1014,7 @@
             mf2keys.sort()
             getnode = lambda fn: mf1.get(fn, nullid)
             for fn in mf2keys:
-                if mf1.has_key(fn):
+                if fn in mf1:
                     if (mf1.flags(fn) != mf2.flags(fn) or
                         (mf1[fn] != mf2[fn] and
                          (mf2[fn] != "" or fcmp(fn, getnode)))):
@@ -976,12 +1035,14 @@
     def add(self, list):
         wlock = self.wlock()
         try:
+            rejected = []
             for f in list:
                 p = self.wjoin(f)
                 try:
                     st = os.lstat(p)
                 except:
                     self.ui.warn(_("%s does not exist!\n") % f)
+                    rejected.append(f)
                     continue
                 if st.st_size > 10000000:
                     self.ui.warn(_("%s: files over 10MB may cause memory and"
@@ -991,10 +1052,14 @@
                 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
                     self.ui.warn(_("%s not added: only files and symlinks "
                                    "supported currently\n") % f)
-                elif self.dirstate[f] in 'an':
+                    rejected.append(p)
+                elif self.dirstate[f] in 'amn':
                     self.ui.warn(_("%s already tracked!\n") % f)
+                elif self.dirstate[f] == 'r':
+                    self.dirstate.normallookup(f)
                 else:
                     self.dirstate.add(f)
+            return rejected
         finally:
             del wlock
 
@@ -1035,14 +1100,14 @@
     def undelete(self, list):
         wlock = None
         try:
-            p = self.dirstate.parents()[0]
-            mn = self.changelog.read(p)[0]
-            m = self.manifest.read(mn)
+            manifests = [self.manifest.read(self.changelog.read(p)[0])
+                         for p in self.dirstate.parents() if p != nullid]
             wlock = self.wlock()
             for f in list:
                 if self.dirstate[f] != 'r':
                     self.ui.warn("%s not removed!\n" % f)
                 else:
+                    m = f in manifests[0] and manifests[0] or manifests[1]
                     t = self.file(f).read(m[f])
                     self.wwrite(f, t, m.flags(f))
                     self.dirstate.normal(f)
@@ -1344,7 +1409,7 @@
     def pull(self, remote, heads=None, force=False):
         lock = self.lock()
         try:
-            fetch = self.findincoming(remote, force=force)
+            fetch = self.findincoming(remote, heads=heads, force=force)
             if fetch == [nullid]:
                 self.ui.status(_("requesting all changes\n"))
 
@@ -1455,14 +1520,15 @@
             return remote.unbundle(cg, remote_heads, 'push')
         return ret[1]
 
-    def changegroupinfo(self, nodes):
-        self.ui.note(_("%d changesets found\n") % len(nodes))
+    def changegroupinfo(self, nodes, source):
+        if self.ui.verbose or source == 'bundle':
+            self.ui.status(_("%d changesets found\n") % len(nodes))
         if self.ui.debugflag:
             self.ui.debug(_("List of changesets:\n"))
             for node in nodes:
                 self.ui.debug("%s\n" % hex(node))
 
-    def changegroupsubset(self, bases, heads, source):
+    def changegroupsubset(self, bases, heads, source, extranodes=None):
         """This function generates a changegroup consisting of all the nodes
         that are descendents of any of the bases, and ancestors of any of
         the heads.
@@ -1472,7 +1538,15 @@
         is non-trivial.
 
         Another wrinkle is doing the reverse, figuring out which changeset in
-        the changegroup a particular filenode or manifestnode belongs to."""
+        the changegroup a particular filenode or manifestnode belongs to.
+        
+        The caller can specify some nodes that must be included in the
+        changegroup using the extranodes argument.  It should be a dict
+        where the keys are the filenames (or 1 for the manifest), and the
+        values are lists of (node, linknode) tuples, where node is a wanted
+        node and linknode is the changelog node that should be transmitted as
+        the linkrev.
+        """
 
         self.hook('preoutgoing', throw=True, source=source)
 
@@ -1482,7 +1556,7 @@
         # msng is short for missing - compute the list of changesets in this
         # changegroup.
         msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
-        self.changegroupinfo(msng_cl_lst)
+        self.changegroupinfo(msng_cl_lst, source)
         # Some bases may turn out to be superfluous, and some heads may be
         # too.  nodesbetween will return the minimal set of bases and heads
         # necessary to re-create the changegroup.
@@ -1611,12 +1685,9 @@
                 if r == next_rev[0]:
                     # If the last rev we looked at was the one just previous,
                     # we only need to see a diff.
-                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
+                    deltamf = mnfst.readdelta(mnfstnode)
                     # For each line in the delta
-                    for dline in delta.splitlines():
-                        # get the filename and filenode for that line
-                        f, fnode = dline.split('\0')
-                        fnode = bin(fnode[:40])
+                    for f, fnode in deltamf.items():
                         f = changedfiles.get(f, None)
                         # And if the file is in the list of files we care
                         # about.
@@ -1668,6 +1739,15 @@
                 return msngset[fnode]
             return lookup_filenode_link
 
+        # Add the nodes that were explicitly requested.
+        def add_extra_nodes(name, nodes):
+            if not extranodes or name not in extranodes:
+                return
+
+            for node, linknode in extranodes[name]:
+                if node not in nodes:
+                    nodes[node] = linknode
+
         # Now that we have all theses utility functions to help out and
         # logically divide up the task, generate the group.
         def gengroup():
@@ -1683,6 +1763,7 @@
             # The list of manifests has been collected by the generator
             # calling our functions back.
             prune_manifests()
+            add_extra_nodes(1, msng_mnfst_set)
             msng_mnfst_lst = msng_mnfst_set.keys()
             # Sort the manifestnodes by revision number.
             msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
@@ -1698,14 +1779,23 @@
             msng_mnfst_lst = None
             msng_mnfst_set.clear()
 
+            if extranodes:
+                for fname in extranodes:
+                    if isinstance(fname, int):
+                        continue
+                    add_extra_nodes(fname,
+                                    msng_filenode_set.setdefault(fname, {}))
+                    changedfiles[fname] = 1
             changedfiles = changedfiles.keys()
             changedfiles.sort()
             # Go through all our files in order sorted by name.
             for fname in changedfiles:
                 filerevlog = self.file(fname)
+                if filerevlog.count() == 0:
+                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                 # Toss out the filenodes that the recipient isn't really
                 # missing.
-                if msng_filenode_set.has_key(fname):
+                if fname in msng_filenode_set:
                     prune_filenodes(fname, filerevlog)
                     msng_filenode_lst = msng_filenode_set[fname].keys()
                 else:
@@ -1713,7 +1803,8 @@
                 # If any filenodes are left, generate the group for them,
                 # otherwise don't bother.
                 if len(msng_filenode_lst) > 0:
-                    yield changegroup.genchunk(fname)
+                    yield changegroup.chunkheader(len(fname))
+                    yield fname
                     # Sort the filenodes by their revision #
                     msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                     # Create a group generator and only pass in a changenode
@@ -1723,7 +1814,7 @@
                                              lookup_filenode_link_func(fname))
                     for chnk in group:
                         yield chnk
-                if msng_filenode_set.has_key(fname):
+                if fname in msng_filenode_set:
                     # Don't need this anymore, toss it to free memory.
                     del msng_filenode_set[fname]
             # Signal that no more groups are left.
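
Here, and in the plain changegroup() generator further below, a single changegroup.genchunk(fname) is replaced by chunkheader(len(fname)) followed by the raw filename. The helper itself lives in changegroup.py, outside this excerpt; assuming the usual changegroup framing (a 4-byte big-endian length that counts its own four bytes, with a zero-length chunk closing each group), it is roughly equivalent to:

    import struct

    def chunkheader(length):
        # assumed: the length prefix includes the 4 header bytes themselves
        return struct.pack(">l", length + 4)

    def genchunk(data):
        # the old helper: header and payload glued into one string
        return chunkheader(len(data)) + data
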
@@ -1746,7 +1837,7 @@
         cl = self.changelog
         nodes = cl.nodesbetween(basenodes, None)[0]
         revset = dict.fromkeys([cl.rev(n) for n in nodes])
-        self.changegroupinfo(nodes)
+        self.changegroupinfo(nodes, source)
 
         def identity(x):
             return x
@@ -1786,10 +1877,13 @@
 
             for fname in changedfiles:
                 filerevlog = self.file(fname)
+                if filerevlog.count() == 0:
+                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                 nodeiter = gennodelst(filerevlog)
                 nodeiter = list(nodeiter)
                 if nodeiter:
-                    yield changegroup.genchunk(fname)
+                    yield changegroup.chunkheader(len(fname))
+                    yield fname
                     lookup = lookuprevlink_func(filerevlog)
                     for chnk in filerevlog.group(nodeiter, lookup):
                         yield chnk
@@ -1801,7 +1895,7 @@
 
         return util.chunkbuffer(gengroup())
 
-    def addchangegroup(self, source, srctype, url):
+    def addchangegroup(self, source, srctype, url, emptyok=False):
         """add changegroup to repo.
 
         return values:
@@ -1837,7 +1931,7 @@
             self.ui.status(_("adding changesets\n"))
             cor = cl.count() - 1
             chunkiter = changegroup.chunkiter(source)
-            if cl.addgroup(chunkiter, csmap, trp, 1) is None:
+            if cl.addgroup(chunkiter, csmap, trp, 1) is None and not emptyok:
                 raise util.Abort(_("received changelog group is empty"))
             cnr = cl.count() - 1
             changesets = cnr - cor
@@ -1888,6 +1982,10 @@
             del tr
 
         if changesets > 0:
+            # forcefully update the on-disk branch cache
+            self.ui.debug(_("updating the branch cache\n"))
+            self.branchcache = None
+            self.branchtags()
             self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                       source=srctype, url=url)
 
--- a/mercurial/lsprof.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/lsprof.py	Wed Feb 06 19:57:52 2008 -0800
@@ -1,28 +1,19 @@
-# this is copied from the lsprof distro because somehow
-# it is not installed by distutils
-#
-# small modifications made
+#! /usr/bin/env python
 
 import sys
-try:
-    from _lsprof import Profiler, profiler_entry, profiler_subentry
-except ImportError, inst:
-    import packagescan
-    if packagescan.scan_in_progress:
-        raise packagescan.SkipPackage('_lsprof not available')
-    raise
+from _lsprof import Profiler, profiler_entry, profiler_subentry
 
 __all__ = ['profile', 'Stats']
 
 def profile(f, *args, **kwds):
     """XXX docstring"""
     p = Profiler()
-    p.enable(subcalls=True)
+    p.enable(subcalls=True, builtins=True)
     try:
-        ret = f(*args, **kwds)
+        f(*args, **kwds)
     finally:
         p.disable()
-    return ret, Stats(p.getstats())
+    return Stats(p.getstats())
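
profile() now returns only the Stats object and drops the wrapped function's own return value, so a caller profiles and then reports in two steps. A small usage sketch, assuming the usual Stats.sort()/Stats.pprint() interface from this module:

    import lsprof

    def fib(n):
        if n < 2:
            return n
        return fib(n - 1) + fib(n - 2)

    stats = lsprof.profile(fib, 25)   # the profiled function's result is discarded
    stats.sort()                      # assumed: orders entries by cost
    stats.pprint(top=10)              # print the ten most expensive entries
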
 
 
 class Stats(object):
@@ -49,14 +40,14 @@
         d = self.data
         if top is not None:
             d = d[:top]
-        cols = "% 12s %11.4f %11.4f   %s\n"
-        hcols = "% 12s %12s %12s %s\n"
-        cols2 = "+%12s %11.4f %11.4f +  %s\n"
-        file.write(hcols % ("CallCount", "Total(s)",
-                            "Inline(s)", "module:lineno(function)"))
+        cols = "% 12s %12s %11.4f %11.4f   %s\n"
+        hcols = "% 12s %12s %12s %12s %s\n"
+        cols2 = "+%12s %12s %11.4f %11.4f +  %s\n"
+        file.write(hcols % ("CallCount", "Recursive", "Total(ms)",
+                            "Inline(ms)", "module:lineno(function)"))
         count = 0
         for e in d:
-            file.write(cols % (e.callcount, e.totaltime,
+            file.write(cols % (e.callcount, e.reccallcount, e.totaltime,
                                e.inlinetime, label(e.code)))
             count += 1
             if limit is not None and count == limit:
@@ -64,7 +55,7 @@
             ccount = 0
             if e.calls:
                 for se in e.calls:
-                    file.write(cols % ("+%s" % se.callcount,
+                    file.write(cols % ("+%s" % se.callcount, se.reccallcount,
                                        se.totaltime, se.inlinetime,
                                        "+%s" % label(se.code)))
                     count += 1
@@ -83,11 +74,11 @@
             e = self.data[i]
             if not isinstance(e.code, str):
                 self.data[i] = type(e)((label(e.code),) + e[1:])
-                if e.calls:
-                    for j in range(len(e.calls)):
-                        se = e.calls[j]
-                        if not isinstance(se.code, str):
-                            e.calls[j] = type(se)((label(se.code),) + se[1:])
+            if e.calls:
+                for j in range(len(e.calls)):
+                    se = e.calls[j]
+                    if not isinstance(se.code, str):
+                        e.calls[j] = type(se)((label(se.code),) + se[1:])
 
 _fn2mod = {}
 
@@ -97,7 +88,7 @@
     try:
         mname = _fn2mod[code.co_filename]
     except KeyError:
-        for k, v in sys.modules.iteritems():
+        for k, v in sys.modules.items():
             if v is None:
                 continue
             if not hasattr(v, '__file__'):
--- a/mercurial/mail.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/mail.py	Wed Feb 06 19:57:52 2008 -0800
@@ -6,11 +6,10 @@
 # of the GNU General Public License, incorporated herein by reference.
 
 from i18n import _
-import os, smtplib, templater, util, socket
+import os, smtplib, util, socket
 
 def _smtp(ui):
-    '''send mail using smtp.'''
-
+    '''build an smtp connection and return a function to send mail'''
     local_hostname = ui.config('smtp', 'local_hostname')
     s = smtplib.SMTP(local_hostname=local_hostname)
     mailhost = ui.config('smtp', 'host')
@@ -30,44 +29,48 @@
         s.ehlo()
     username = ui.config('smtp', 'username')
     password = ui.config('smtp', 'password')
+    if username and not password:
+        password = ui.getpass()
     if username and password:
         ui.note(_('(authenticating to mail server as %s)\n') %
                   (username))
         s.login(username, password)
-    return s
 
-class _sendmail(object):
-    '''send mail using sendmail.'''
+    def send(sender, recipients, msg):
+        try:
+            return s.sendmail(sender, recipients, msg)
+        except smtplib.SMTPRecipientsRefused, inst:
+            recipients = [r[1] for r in inst.recipients.values()]
+            raise util.Abort('\n' + '\n'.join(recipients))
+        except smtplib.SMTPException, inst:
+            raise util.Abort(inst)
 
-    def __init__(self, ui, program):
-        self.ui = ui
-        self.program = program
+    return send
 
-    def sendmail(self, sender, recipients, msg):
-        cmdline = '%s -f %s %s' % (
-            self.program, templater.email(sender),
-            ' '.join(map(templater.email, recipients)))
-        self.ui.note(_('sending mail: %s\n') % cmdline)
-        fp = os.popen(cmdline, 'w')
-        fp.write(msg)
-        ret = fp.close()
-        if ret:
-            raise util.Abort('%s %s' % (
-                os.path.basename(self.program.split(None, 1)[0]),
-                util.explain_exit(ret)[0]))
+def _sendmail(ui, sender, recipients, msg):
+    '''send mail using sendmail.'''
+    program = ui.config('email', 'method')
+    cmdline = '%s -f %s %s' % (program, util.email(sender),
+                               ' '.join(map(util.email, recipients)))
+    ui.note(_('sending mail: %s\n') % cmdline)
+    fp = os.popen(cmdline, 'w')
+    fp.write(msg)
+    ret = fp.close()
+    if ret:
+        raise util.Abort('%s %s' % (
+            os.path.basename(program.split(None, 1)[0]),
+            util.explain_exit(ret)[0]))
 
 def connect(ui):
-    '''make a mail connection. object returned has one method, sendmail.
+    '''make a mail connection. return a function to send mail.
     call as sendmail(sender, list-of-recipients, msg).'''
-
-    method = ui.config('email', 'method', 'smtp')
-    if method == 'smtp':
+    if ui.config('email', 'method', 'smtp') == 'smtp':
         return _smtp(ui)
-
-    return _sendmail(ui, method)
+    return lambda s, r, m: _sendmail(ui, s, r, m)
 
 def sendmail(ui, sender, recipients, msg):
-    return connect(ui).sendmail(sender, recipients, msg)
+    send = connect(ui)
+    return send(sender, recipients, msg)
 
 def validateconfig(ui):
     '''determine if we have enough config data to try sending email.'''
--- a/mercurial/manifest.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/manifest.py	Wed Feb 06 19:57:52 2008 -0800
@@ -38,10 +38,11 @@
 
     def parse(self, lines):
         mfdict = manifestdict()
+        fdict = mfdict._flags
         for l in lines.splitlines():
             f, n = l.split('\0')
             if len(n) > 40:
-                mfdict._flags[f] = n[40:]
+                fdict[f] = n[40:]
                 mfdict[f] = bin(n[:40])
             else:
                 mfdict[f] = bin(n)
--- a/mercurial/mdiff.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/mdiff.py	Wed Feb 06 19:57:52 2008 -0800
@@ -30,7 +30,7 @@
     defaults = {
         'context': 3,
         'text': False,
-        'showfunc': True,
+        'showfunc': False,
         'git': False,
         'nodates': False,
         'ignorews': False,
@@ -59,11 +59,11 @@
         text = re.sub('\n+', '', text)
     return text
 
-def unidiff(a, ad, b, bd, fn, r=None, opts=defaultopts):
+def unidiff(a, ad, b, bd, fn1, fn2, r=None, opts=defaultopts):
     def datetag(date, addtab=True):
         if not opts.git and not opts.nodates:
             return '\t%s\n' % date
-        if addtab and ' ' in fn:
+        if addtab and ' ' in fn1:
             return '\t\n'
         return '\n'
 
@@ -76,29 +76,29 @@
             return md5.new(v).digest()
         if a and b and len(a) == len(b) and h(a) == h(b):
             return ""
-        l = ['Binary file %s has changed\n' % fn]
+        l = ['Binary file %s has changed\n' % fn1]
     elif not a:
         b = splitnewlines(b)
         if a is None:
             l1 = '--- /dev/null%s' % datetag(epoch, False)
         else:
-            l1 = "--- %s%s" % ("a/" + fn, datetag(ad))
-        l2 = "+++ %s%s" % ("b/" + fn, datetag(bd))
+            l1 = "--- %s%s" % ("a/" + fn1, datetag(ad))
+        l2 = "+++ %s%s" % ("b/" + fn2, datetag(bd))
         l3 = "@@ -0,0 +1,%d @@\n" % len(b)
         l = [l1, l2, l3] + ["+" + e for e in b]
     elif not b:
         a = splitnewlines(a)
-        l1 = "--- %s%s" % ("a/" + fn, datetag(ad))
+        l1 = "--- %s%s" % ("a/" + fn1, datetag(ad))
         if b is None:
             l2 = '+++ /dev/null%s' % datetag(epoch, False)
         else:
-            l2 = "+++ %s%s" % ("b/" + fn, datetag(bd))
+            l2 = "+++ %s%s" % ("b/" + fn2, datetag(bd))
         l3 = "@@ -1,%d +0,0 @@\n" % len(a)
         l = [l1, l2, l3] + ["-" + e for e in a]
     else:
         al = splitnewlines(a)
         bl = splitnewlines(b)
-        l = list(bunidiff(a, b, al, bl, "a/" + fn, "b/" + fn, opts=opts))
+        l = list(bunidiff(a, b, al, bl, "a/" + fn1, "b/" + fn2, opts=opts))
         if not l: return ""
         # difflib uses a space, rather than a tab
         l[0] = "%s%s" % (l[0][:-2], datetag(ad))
@@ -110,7 +110,7 @@
 
     if r:
         l.insert(0, "diff %s %s\n" %
-                    (' '.join(["-r %s" % rev for rev in r]), fn))
+                    (' '.join(["-r %s" % rev for rev in r]), fn1))
 
     return "".join(l)
 
@@ -245,6 +245,9 @@
 def get_matching_blocks(a, b):
     return [(d[0], d[2], d[1] - d[0]) for d in bdiff.blocks(a, b)]
 
+def trivialdiffheader(length):
+    return struct.pack(">lll", 0, 0, length)
+
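
The new trivialdiffheader() emits the 12-byte header of a single bdiff fragment: three big-endian 32-bit integers giving the start offset, end offset and length of replacement data (the decode() routine in mpatch.c later in this changeset reads exactly this layout). With start = end = 0 the fragment inserts length bytes at the front, so prefixing a full text with it yields a delta that rebuilds that text from an empty base:

    import struct

    def trivialdiffheader(length):
        return struct.pack(">lll", 0, 0, length)

    full_text = "a complete revision\n"
    # a delta against the empty string that expands to full_text
    delta = trivialdiffheader(len(full_text)) + full_text
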
 patches = mpatch.patches
 patchedsize = mpatch.patchedsize
 textdiff = bdiff.bdiff
--- a/mercurial/merge.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/merge.py	Wed Feb 06 19:57:52 2008 -0800
@@ -7,57 +7,7 @@
 
 from node import *
 from i18n import _
-import errno, util, os, tempfile, context
-
-def filemerge(repo, fw, fo, wctx, mctx):
-    """perform a 3-way merge in the working directory
-
-    fw = filename in the working directory
-    fo = filename in other parent
-    wctx, mctx = working and merge changecontexts
-    """
-
-    def temp(prefix, ctx):
-        pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
-        (fd, name) = tempfile.mkstemp(prefix=pre)
-        data = repo.wwritedata(ctx.path(), ctx.data())
-        f = os.fdopen(fd, "wb")
-        f.write(data)
-        f.close()
-        return name
-
-    fcm = wctx.filectx(fw)
-    fco = mctx.filectx(fo)
-
-    if not fco.cmp(fcm.data()): # files identical?
-        return None
-
-    fca = fcm.ancestor(fco)
-    if not fca:
-        fca = repo.filectx(fw, fileid=nullrev)
-    a = repo.wjoin(fw)
-    b = temp("base", fca)
-    c = temp("other", fco)
-
-    if fw != fo:
-        repo.ui.status(_("merging %s and %s\n") % (fw, fo))
-    else:
-        repo.ui.status(_("merging %s\n") % fw)
-
-    repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
-
-    cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
-           or "hgmerge")
-    r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
-                    environ={'HG_FILE': fw,
-                             'HG_MY_NODE': str(wctx.parents()[0]),
-                             'HG_OTHER_NODE': str(mctx)})
-    if r:
-        repo.ui.warn(_("merging %s failed!\n") % fw)
-
-    os.unlink(b)
-    os.unlink(c)
-    return r
+import errno, util, os, heapq, filemerge
 
 def checkunknown(wctx, mctx):
     "check for collisions between unknown files and files in mctx"
@@ -65,8 +15,9 @@
     for f in wctx.unknown():
         if f in man:
             if mctx.filectx(f).cmp(wctx.filectx(f).data()):
-                raise util.Abort(_("untracked local file '%s' differs"
-                                   " from remote version") % f)
+                raise util.Abort(_("untracked file in working directory differs"
+                                   " from file in requested revision: '%s'")
+                                 % f)
 
 def checkcollision(mctx):
     "check for case folding collisions in the destination context"
@@ -175,7 +126,7 @@
                 continue
             # named changed on only one side?
             if ca.path() == c.path() or ca.path() == c2.path():
-                if c == ca or c2 == ca: # no merge needed, ignore copy
+                if c == ca and c2 == ca: # no merge needed, ignore copy
                     continue
                 copy[c.path()] = of
 
@@ -186,25 +137,46 @@
     if not m1 or not m2 or not ma:
         return {}, {}
 
+    repo.ui.debug(_("  searching for copies back to rev %d\n") % limit)
+
     u1 = nonoverlap(m1, m2, ma)
     u2 = nonoverlap(m2, m1, ma)
 
+    if u1:
+        repo.ui.debug(_("  unmatched files in local:\n   %s\n")
+                      % "\n   ".join(u1))
+    if u2:
+        repo.ui.debug(_("  unmatched files in other:\n   %s\n")
+                      % "\n   ".join(u2))
+
     for f in u1:
         checkcopies(ctx(f, m1[f]), m2, ma)
 
     for f in u2:
         checkcopies(ctx(f, m2[f]), m1, ma)
 
-    d2 = {}
+    diverge2 = {}
     for of, fl in diverge.items():
-        for f in fl:
-            fo = list(fl)
-            fo.remove(f)
-            d2[f] = (of, fo)
+        if len(fl) == 1:
+            del diverge[of] # not actually divergent
+        else:
+            diverge2.update(dict.fromkeys(fl)) # reverse map for below
+
+    if fullcopy:
+        repo.ui.debug(_("  all copies found (* = to merge, ! = divergent):\n"))
+        for f in fullcopy:
+            note = ""
+            if f in copy: note += "*"
+            if f in diverge2: note += "!"
+            repo.ui.debug(_("   %s -> %s %s\n") % (f, fullcopy[f], note))
+
+    del diverge2
 
     if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
         return copy, diverge
 
+    repo.ui.debug(_("  checking for directory renames\n"))
+
     # generate a directory move map
     d1, d2 = dirs(m1), dirs(m2)
     invalid = {}
@@ -239,6 +211,9 @@
     if not dirmove:
         return copy, diverge
 
+    for d in dirmove:
+        repo.ui.debug(_("  dir %s -> %s\n") % (d, dirmove[d]))
+
     # check unaccounted nonoverlapping files against directory moves
     for f in u1 + u2:
         if f not in fullcopy:
@@ -246,10 +221,63 @@
                 if f.startswith(d):
                     # new file added in a directory that was moved, move it
                     copy[f] = dirmove[d] + f[len(d):]
+                    repo.ui.debug(_("  file %s -> %s\n") % (f, copy[f]))
                     break
 
     return copy, diverge
 
+def symmetricdifference(repo, rev1, rev2):
+    """symmetric difference of the sets of ancestors of rev1 and rev2
+
+    I.e. revisions that are ancestors of rev1 or rev2, but not both.
+    """
+    # basic idea:
+    # - mark rev1 and rev2 with different colors
+    # - walk the graph in topological order with the help of a heap;
+    #   for each revision r:
+    #     - if r has only one color, we want to return it
+    #     - add colors[r] to its parents
+    #
+    # We keep track of the number of revisions in the heap that
+    # we may be interested in.  We stop walking the graph as soon
+    # as this number reaches 0.
+    WHITE = 1
+    BLACK = 2
+    ALLCOLORS = WHITE | BLACK
+    colors = {rev1: WHITE, rev2: BLACK}
+
+    cl = repo.changelog
+
+    visit = [-rev1, -rev2]
+    heapq.heapify(visit)
+    n_wanted = len(visit)
+    ret = []
+
+    while n_wanted:
+        r = -heapq.heappop(visit)
+        wanted = colors[r] != ALLCOLORS
+        n_wanted -= wanted
+        if wanted:
+            ret.append(r)
+
+        for p in cl.parentrevs(r):
+            if p == nullrev:
+                continue
+            if p not in colors:
+                # first time we see p; add it to visit
+                n_wanted += wanted
+                colors[p] = colors[r]
+                heapq.heappush(visit, -p)
+            elif colors[p] != ALLCOLORS and colors[p] != colors[r]:
+                # at first we thought we wanted p, but now
+                # we know we don't really want it
+                n_wanted -= 1
+                colors[p] |= colors[r]
+
+        del colors[r]
+
+    return ret
+
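
The comment block above describes the two-color walk; the same algorithm restated over a plain parent map, so it can be tried outside a repository (the parent dict and revision numbers below are invented, and, as in the changelog, every parent has a smaller number than its child):

    import heapq

    def symmetric_difference(parents, rev1, rev2):
        WHITE, BLACK = 1, 2
        ALLCOLORS = WHITE | BLACK
        colors = {rev1: WHITE, rev2: BLACK}
        visit = [-rev1, -rev2]
        heapq.heapify(visit)
        n_wanted = len(visit)
        ret = []
        while n_wanted:
            r = -heapq.heappop(visit)
            wanted = colors[r] != ALLCOLORS
            n_wanted -= wanted
            if wanted:
                ret.append(r)
            for p in parents.get(r, []):
                if p not in colors:
                    # first time we see p; add it to visit
                    n_wanted += wanted
                    colors[p] = colors[r]
                    heapq.heappush(visit, -p)
                elif colors[p] != ALLCOLORS and colors[p] != colors[r]:
                    # p turns out to be a common ancestor after all
                    n_wanted -= 1
                    colors[p] |= colors[r]
            del colors[r]
        return ret

    # toy history: 0 <- 1 <- 2 <- 4 and 1 <- 3
    parents = {1: [0], 2: [1], 3: [1], 4: [2]}
    print symmetric_difference(parents, 4, 3)   # -> [4, 3, 2]
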
 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
     """
     Merge p1 and p2 with ancestor ma and generate merge action list
@@ -275,20 +303,35 @@
         if not f2:
             f2 = f
             fa = f
-        a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
-        if ((a^b) | (a^c)) ^ a:
-            return 'x'
-        a, b, c = ma.linkf(fa), m1.linkf(f), m2.linkf(f2)
-        if ((a^b) | (a^c)) ^ a:
-            return 'l'
-        return ''
+        a, m, n = ma.flags(fa), m1.flags(f), m2.flags(f2)
+        if m == n: # flags agree
+            return m # unchanged
+        if m and n: # flags are set but don't agree
+            if not a: # both differ from parent
+                r = repo.ui.prompt(
+                    _(" conflicting flags for %s\n"
+                      "(n)one, e(x)ec or sym(l)ink?") % f, "[nxl]", "n")
+                return r != "n" and r or ''
+            if m == a:
+                return n # changed from m to n
+            return m # changed from n to m
+        if m and m != a: # changed from a to m
+            return m
+        if n and n != a: # changed from a to n
+            return n
+        return '' # flag was cleared
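
The rewritten fmerge() replaces the old bitwise-xor trick with an explicit three-way comparison of the ancestor, local and remote flag strings. The same rules as a standalone function (names and the ask callback are illustrative; the real code prompts through repo.ui.prompt):

    def mergeflags(ancestor, local, other, ask=None):
        if local == other:                # flags agree: nothing to decide
            return local
        if local and other:               # both set, but they disagree
            if not ancestor:              # ... and both differ from the ancestor
                if ask:                   # genuine conflict: defer to the caller
                    return ask()
                return ''
            if local == ancestor:
                return other              # only the other side changed the flag
            return local                  # only the local side changed the flag
        if local and local != ancestor:
            return local                  # set or changed locally
        if other and other != ancestor:
            return other                  # set or changed remotely
        return ''                         # the flag was cleared on one side
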
 
     def act(msg, m, f, *args):
         repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
         action.append((f, m) + args)
 
     if not (backwards or overwrite):
-        copy, diverge = findcopies(repo, m1, m2, ma, pa.rev())
+        rev1 = p1.rev()
+        if rev1 is None:
+            # p1 is a workingctx
+            rev1 = p1.parents()[0].rev()
+        limit = min(symmetricdifference(repo, rev1, p2.rev()))
+        copy, diverge = findcopies(repo, m1, m2, ma, limit)
 
     for of, fl in diverge.items():
         act("divergent renames", "dr", of, fl)
@@ -300,24 +343,31 @@
         if partial and not partial(f):
             continue
         if f in m2:
+            if overwrite or backwards:
+                rflags = m2.flags(f)
+            else:
+                rflags = fmerge(f)
             # are files different?
             if n != m2[f]:
                 a = ma.get(f, nullid)
-                # are both different from the ancestor?
-                if not overwrite and n != a and m2[f] != a:
-                    act("versions differ", "m", f, f, f, fmerge(f), False)
                 # are we clobbering?
-                # is remote's version newer?
+                if overwrite:
+                    act("clobbering", "g", f, rflags)
                 # or are we going back in time and clean?
-                elif overwrite or m2[f] != a or (backwards and not n[20:]):
-                    act("remote is newer", "g", f, m2.flags(f))
+                elif backwards and not n[20:]:
+                    act("reverting", "g", f, rflags)
+                # are both different from the ancestor?
+                elif n != a and m2[f] != a:
+                    act("versions differ", "m", f, f, f, rflags, False)
+                # is remote's version newer?
+                elif m2[f] != a:
+                    act("remote is newer", "g", f, rflags)
                 # local is newer, not overwrite, check mode bits
-                elif fmerge(f) != m1.flags(f):
-                    act("update permissions", "e", f, m2.flags(f))
+                elif m1.flags(f) != rflags:
+                    act("update permissions", "e", f, rflags)
             # contents same, check mode bits
-            elif m1.flags(f) != m2.flags(f):
-                if overwrite or fmerge(f) != m1.flags(f):
-                    act("update permissions", "e", f, m2.flags(f))
+            elif m1.flags(f) != rflags:
+                act("update permissions", "e", f, rflags)
         elif f in copied:
             continue
         elif f in copy:
@@ -334,8 +384,9 @@
         elif f in ma:
             if n != ma[f] and not overwrite:
                 if repo.ui.prompt(
-                    (_(" local changed %s which remote deleted\n") % f) +
-                    _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
+                    _(" local changed %s which remote deleted\n"
+                      "use (c)hanged version or (d)elete?") % f,
+                    _("[cd]"), _("c")) == _("d"):
                     act("prompt delete", "r", f)
             else:
                 act("other deleted", "r", f)
@@ -367,8 +418,9 @@
                 act("recreating", "g", f, m2.flags(f))
             elif n != ma[f]:
                 if repo.ui.prompt(
-                    (_("remote changed %s which local deleted\n") % f) +
-                    _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
+                    _("remote changed %s which local deleted\n"
+                      "use (c)hanged version or leave (d)eleted?") % f,
+                    _("[cd]"), _("c")) == _("c"):
                     act("prompt recreating", "g", f, m2.flags(f))
         else:
             act("remote created", "g", f, m2.flags(f))
@@ -380,13 +432,24 @@
 
     updated, merged, removed, unresolved = 0, 0, 0, 0
     action.sort()
+    # prescan for copy/renames
+    for a in action:
+        f, m = a[:2]
+        if m == 'm': # merge
+            f2, fd, flags, move = a[2:]
+            if f != fd:
+                repo.ui.debug(_("copying %s to %s\n") % (f, fd))
+                repo.wwrite(fd, repo.wread(f), flags)
+
+    audit_path = util.path_auditor(repo.root)
+
     for a in action:
         f, m = a[:2]
         if f and f[0] == "/":
             continue
         if m == "r": # remove
             repo.ui.note(_("removing %s\n") % f)
-            util.audit_path(f)
+            audit_path(f)
             try:
                 util.unlink(repo.wjoin(f))
             except OSError, inst:
@@ -396,7 +459,7 @@
             removed += 1
         elif m == "m": # merge
             f2, fd, flags, move = a[2:]
-            r = filemerge(repo, f, f2, wctx, mctx)
+            r = filemerge.filemerge(repo, f, fd, f2, wctx, mctx)
             if r > 0:
                 unresolved += 1
             else:
@@ -404,13 +467,10 @@
                     updated += 1
                 else:
                     merged += 1
-            if f != fd:
-                repo.ui.debug(_("copying %s to %s\n") % (f, fd))
-                repo.wwrite(fd, repo.wread(f), flags)
-                if move:
-                    repo.ui.debug(_("removing %s\n") % f)
-                    os.unlink(repo.wjoin(f))
-            util.set_exec(repo.wjoin(fd), "x" in flags)
+            util.set_flags(repo.wjoin(fd), flags)
+            if f != fd and move and util.lexists(repo.wjoin(f)):
+                repo.ui.debug(_("removing %s\n") % f)
+                os.unlink(repo.wjoin(f))
         elif m == "g": # get
             flags = a[2]
             repo.ui.note(_("getting %s\n") % f)
@@ -436,7 +496,7 @@
                 repo.ui.warn(" %s\n" % nf)
         elif m == "e": # exec
             flags = a[2]
-            util.set_exec(repo.wjoin(f), flags)
+            util.set_flags(repo.wjoin(f), flags)
 
     return updated, merged, removed, unresolved
 
@@ -452,7 +512,7 @@
                 repo.dirstate.forget(f)
         elif m == "f": # forget
             repo.dirstate.forget(f)
-        elif m == "g": # get
+        elif m in "ge": # get or exec change
             if branchmerge:
                 repo.dirstate.normaldirty(f)
             else:
@@ -476,7 +536,7 @@
                 # of that file some time in the past. Thus our
                 # merge will appear as a normal local file
                 # modification.
-                repo.dirstate.normaldirty(fd)
+                repo.dirstate.normallookup(fd)
                 if move:
                     repo.dirstate.forget(f)
         elif m == "d": # directory rename
@@ -513,7 +573,10 @@
             try:
                 node = repo.branchtags()[wc.branch()]
             except KeyError:
-                raise util.Abort(_("branch %s not found") % wc.branch())
+                if wc.branch() == "default": # no default branch!
+                    node = repo.lookup("tip") # update to tip
+                else:
+                    raise util.Abort(_("branch %s not found") % wc.branch())
         overwrite = force and not branchmerge
         forcemerge = force and branchmerge
         pl = wc.parents()
--- a/mercurial/mpatch.c	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/mpatch.c	Wed Feb 06 19:57:52 2008 -0800
@@ -24,6 +24,16 @@
 #include <stdlib.h>
 #include <string.h>
 
+/* Definitions to get compatibility with python 2.4 and earlier which
+   does not have Py_ssize_t. See also PEP 353.
+   Note: msvc (8 or earlier) does not have ssize_t, so we use Py_ssize_t.
+*/
+#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
+typedef int Py_ssize_t;
+#define PY_SSIZE_T_MAX INT_MAX
+#define PY_SSIZE_T_MIN INT_MIN
+#endif
+
 #ifdef _WIN32
 # ifdef _MSC_VER
 /* msvc 6.0 has problems */
@@ -55,7 +65,7 @@
 
 struct frag {
 	int start, end, len;
-	char *data;
+	const char *data;
 };
 
 struct flist {
@@ -221,11 +231,11 @@
 }
 
 /* decode a binary patch into a hunk list */
-static struct flist *decode(char *bin, int len)
+static struct flist *decode(const char *bin, int len)
 {
 	struct flist *l;
 	struct frag *lt;
-	char *data = bin + 12, *end = bin + len;
+	const char *data = bin + 12, *end = bin + len;
 	char decode[12]; /* for dealing with alignment issues */
 
 	/* assume worst case size, we won't have many of these lists */
@@ -284,7 +294,7 @@
 	return outlen;
 }
 
-static int apply(char *buf, char *orig, int len, struct flist *l)
+static int apply(char *buf, const char *orig, int len, struct flist *l)
 {
 	struct frag *f = l->head;
 	int last = 0;
@@ -312,13 +322,17 @@
 static struct flist *fold(PyObject *bins, int start, int end)
 {
 	int len;
+	Py_ssize_t blen;
+	const char *buffer;
 
 	if (start + 1 == end) {
 		/* trivial case, output a decoded list */
 		PyObject *tmp = PyList_GetItem(bins, start);
 		if (!tmp)
 			return NULL;
-		return decode(PyString_AsString(tmp), PyString_Size(tmp));
+		if (PyObject_AsCharBuffer(tmp, &buffer, &blen))
+			return NULL;
+		return decode(buffer, blen);
 	}
 
 	/* divide and conquer, memory management is elsewhere */
@@ -332,10 +346,12 @@
 {
 	PyObject *text, *bins, *result;
 	struct flist *patch;
-	char *in, *out;
+	const char *in;
+	char *out;
 	int len, outlen;
+	Py_ssize_t inlen;
 
-	if (!PyArg_ParseTuple(args, "SO:mpatch", &text, &bins))
+	if (!PyArg_ParseTuple(args, "OO:mpatch", &text, &bins))
 		return NULL;
 
 	len = PyList_Size(bins);
@@ -345,11 +361,14 @@
 		return text;
 	}
 
+	if (PyObject_AsCharBuffer(text, &in, &inlen))
+		return NULL;
+
 	patch = fold(bins, 0, len);
 	if (!patch)
 		return NULL;
 
-	outlen = calcsize(PyString_Size(text), patch);
+	outlen = calcsize(inlen, patch);
 	if (outlen < 0) {
 		result = NULL;
 		goto cleanup;
@@ -359,9 +378,8 @@
 		result = NULL;
 		goto cleanup;
 	}
-	in = PyString_AsString(text);
 	out = PyString_AsString(result);
-	if (!apply(out, in, PyString_Size(text), patch)) {
+	if (!apply(out, in, inlen, patch)) {
 		Py_DECREF(result);
 		result = NULL;
 	}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/osutil.c	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,314 @@
+/*
+ osutil.c - native operating system services
+
+ Copyright 2007 Matt Mackall and others
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+*/
+
+#define _ATFILE_SOURCE
+#include <Python.h>
+#include <dirent.h>
+#include <fcntl.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+struct listdir_stat {
+	PyObject_HEAD
+	struct stat st;
+};
+
+#define listdir_slot(name) \
+    static PyObject *listdir_stat_##name(PyObject *self, void *x) \
+    { \
+        return PyInt_FromLong(((struct listdir_stat *)self)->st.name); \
+    }
+
+listdir_slot(st_dev)
+listdir_slot(st_mode)
+listdir_slot(st_nlink)
+listdir_slot(st_size)
+listdir_slot(st_mtime)
+listdir_slot(st_ctime)
+
+static struct PyGetSetDef listdir_stat_getsets[] = {
+	{"st_dev", listdir_stat_st_dev, 0, 0, 0},
+	{"st_mode", listdir_stat_st_mode, 0, 0, 0},
+	{"st_nlink", listdir_stat_st_nlink, 0, 0, 0},
+	{"st_size", listdir_stat_st_size, 0, 0, 0},
+	{"st_mtime", listdir_stat_st_mtime, 0, 0, 0},
+	{"st_ctime", listdir_stat_st_ctime, 0, 0, 0},
+	{0, 0, 0, 0, 0}
+};
+
+static PyObject *listdir_stat_new(PyTypeObject *t, PyObject *a, PyObject *k)
+{
+	return t->tp_alloc(t, 0);
+}
+
+static void listdir_stat_dealloc(PyObject *o)
+{
+	o->ob_type->tp_free(o);
+}
+
+static PyTypeObject listdir_stat_type = {
+	PyObject_HEAD_INIT(NULL)
+	0,                         /*ob_size*/
+	"osutil.stat",             /*tp_name*/
+	sizeof(struct listdir_stat), /*tp_basicsize*/
+	0,                         /*tp_itemsize*/
+	(destructor)listdir_stat_dealloc, /*tp_dealloc*/
+	0,                         /*tp_print*/
+	0,                         /*tp_getattr*/
+	0,                         /*tp_setattr*/
+	0,                         /*tp_compare*/
+	0,                         /*tp_repr*/
+	0,                         /*tp_as_number*/
+	0,                         /*tp_as_sequence*/
+	0,                         /*tp_as_mapping*/
+	0,                         /*tp_hash */
+	0,                         /*tp_call*/
+	0,                         /*tp_str*/
+	0,                         /*tp_getattro*/
+	0,                         /*tp_setattro*/
+	0,                         /*tp_as_buffer*/
+	Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/
+	"stat objects",            /* tp_doc */
+	0,                         /* tp_traverse */
+	0,                         /* tp_clear */
+	0,                         /* tp_richcompare */
+	0,                         /* tp_weaklistoffset */
+	0,                         /* tp_iter */
+	0,                         /* tp_iternext */
+	0,                         /* tp_methods */
+	0,                         /* tp_members */
+	listdir_stat_getsets,      /* tp_getset */
+	0,                         /* tp_base */
+	0,                         /* tp_dict */
+	0,                         /* tp_descr_get */
+	0,                         /* tp_descr_set */
+	0,                         /* tp_dictoffset */
+	0,                         /* tp_init */
+	0,                         /* tp_alloc */
+	listdir_stat_new,          /* tp_new */
+};
+
+static PyObject *listfiles(PyObject *list, DIR *dir,
+			   int keep_stat, int *need_stat)
+{
+	struct dirent *ent;
+	PyObject *name, *py_kind, *val;
+
+#ifdef DT_REG
+	*need_stat = 0;
+#else
+	*need_stat = 1;
+#endif
+
+	for (ent = readdir(dir); ent; ent = readdir(dir)) {
+		int kind = -1;
+
+		if (!strcmp(ent->d_name, ".") || !strcmp(ent->d_name, ".."))
+			continue;
+
+#ifdef DT_REG
+		if (!keep_stat)
+			switch (ent->d_type) {
+			case DT_REG: kind = S_IFREG; break;
+			case DT_DIR: kind = S_IFDIR; break;
+			case DT_LNK: kind = S_IFLNK; break;
+			case DT_BLK: kind = S_IFBLK; break;
+			case DT_CHR: kind = S_IFCHR; break;
+			case DT_FIFO: kind = S_IFIFO; break;
+			case DT_SOCK: kind = S_IFSOCK; break;
+			default:
+				*need_stat = 0;
+				break;
+			}
+#endif
+
+		if (kind != -1)
+			py_kind = PyInt_FromLong(kind);
+		else {
+			py_kind = Py_None;
+			Py_INCREF(Py_None);
+		}
+
+		val = PyTuple_New(keep_stat ? 3 : 2);
+		name = PyString_FromString(ent->d_name);
+
+		if (!name || !py_kind || !val) {
+			Py_XDECREF(name);
+			Py_XDECREF(py_kind);
+			Py_XDECREF(val);
+			return PyErr_NoMemory();
+		}
+
+		PyTuple_SET_ITEM(val, 0, name);
+		PyTuple_SET_ITEM(val, 1, py_kind);
+		if (keep_stat) {
+			PyTuple_SET_ITEM(val, 2, Py_None);
+			Py_INCREF(Py_None);
+		}
+
+		PyList_Append(list, val);
+		Py_DECREF(val);
+	}
+
+	return 0;
+}
+
+static PyObject *statfiles(PyObject *list, PyObject *ctor_args, int keep,
+			   char *path, int len, int dfd)
+{
+	struct stat buf;
+	struct stat *stp = &buf;
+	int kind;
+	int ret;
+	ssize_t i;
+	ssize_t size = PyList_Size(list);
+
+	for (i = 0; i < size; i++) {
+		PyObject *elt = PyList_GetItem(list, i);
+		char *name = PyString_AsString(PyTuple_GET_ITEM(elt, 0));
+		PyObject *py_st = NULL;
+		PyObject *py_kind = PyTuple_GET_ITEM(elt, 1);
+
+		kind = py_kind == Py_None ? -1 : PyInt_AsLong(py_kind);
+		if (kind != -1 && !keep)
+			continue;
+
+		strncpy(path + len + 1, name, PATH_MAX - len);
+		path[PATH_MAX] = 0;
+
+		if (keep) {
+			py_st = PyObject_CallObject(
+				(PyObject *)&listdir_stat_type, ctor_args);
+			if (!py_st)
+				return PyErr_NoMemory();
+			stp = &((struct listdir_stat *)py_st)->st;
+			PyTuple_SET_ITEM(elt, 2, py_st);
+		}
+
+#ifdef AT_SYMLINK_NOFOLLOW
+		ret = fstatat(dfd, name, stp, AT_SYMLINK_NOFOLLOW);
+#else
+		ret = lstat(path, stp);
+#endif
+		if (ret == -1)
+			return PyErr_SetFromErrnoWithFilename(PyExc_OSError,
+							      path);
+
+		if (kind == -1) {
+			if (S_ISREG(stp->st_mode))
+				kind = S_IFREG;
+			else if (S_ISDIR(stp->st_mode))
+				kind = S_IFDIR;
+			else if (S_ISLNK(stp->st_mode))
+				kind = S_IFLNK;
+			else if (S_ISBLK(stp->st_mode))
+				kind = S_IFBLK;
+			else if (S_ISCHR(stp->st_mode))
+				kind = S_IFCHR;
+			else if (S_ISFIFO(stp->st_mode))
+				kind = S_IFIFO;
+			else if (S_ISSOCK(stp->st_mode))
+				kind = S_IFSOCK;
+			else
+				kind = stp->st_mode;
+		}
+
+		if (py_kind == Py_None && kind != -1) {
+			py_kind = PyInt_FromLong(kind);
+			if (!py_kind)
+				return PyErr_NoMemory();
+			Py_XDECREF(Py_None);
+			PyTuple_SET_ITEM(elt, 1, py_kind);
+		}
+	}
+
+	return 0;
+}
+
+static PyObject *listdir(PyObject *self, PyObject *args, PyObject *kwargs)
+{
+	static char *kwlist[] = { "path", "stat", NULL };
+	DIR *dir = NULL;
+	PyObject *statobj = NULL;
+	PyObject *list = NULL;
+	PyObject *err = NULL;
+	PyObject *ctor_args = NULL;
+	char *path;
+	char full_path[PATH_MAX + 10];
+	int path_len;
+	int need_stat, keep_stat;
+	int dfd;
+
+	if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s#|O:listdir", kwlist,
+					 &path, &path_len, &statobj))
+		goto bail;
+
+	keep_stat = statobj && PyObject_IsTrue(statobj);
+
+#ifdef AT_SYMLINK_NOFOLLOW
+	dfd = open(path, O_RDONLY);
+	dir = fdopendir(dfd);
+#else
+	dir = opendir(path);
+	dfd = -1;
+#endif
+	if (!dir) {
+		err = PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+		goto bail;
+	}
+
+	list = PyList_New(0);
+	ctor_args = PyTuple_New(0);
+	if (!list || !ctor_args)
+		goto bail;
+
+	strncpy(full_path, path, PATH_MAX);
+	full_path[path_len] = '/';
+
+	err = listfiles(list, dir, keep_stat, &need_stat);
+	if (err)
+		goto bail;
+
+	PyList_Sort(list);
+
+	if (!keep_stat && !need_stat)
+		goto done;
+
+	err = statfiles(list, ctor_args, keep_stat, full_path, path_len, dfd);
+	if (!err)
+		goto done;
+
+ bail:
+	Py_XDECREF(list);
+
+ done:
+	Py_XDECREF(ctor_args);
+	if (dir)
+		closedir(dir);
+	return err ? err : list;
+}
+
+
+static char osutil_doc[] = "Native operating system services.";
+
+static PyMethodDef methods[] = {
+	{"listdir", (PyCFunction)listdir, METH_VARARGS | METH_KEYWORDS,
+	 "list a directory\n"},
+	{NULL, NULL}
+};
+
+PyMODINIT_FUNC initosutil(void)
+{
+	if (PyType_Ready(&listdir_stat_type) == -1)
+		return;
+
+	Py_InitModule3("osutil", methods, osutil_doc);
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/osutil.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,37 @@
+import os, stat
+
+def _mode_to_kind(mode):
+    if stat.S_ISREG(mode): return stat.S_IFREG
+    if stat.S_ISDIR(mode): return stat.S_IFDIR
+    if stat.S_ISLNK(mode): return stat.S_IFLNK
+    if stat.S_ISBLK(mode): return stat.S_IFBLK
+    if stat.S_ISCHR(mode): return stat.S_IFCHR
+    if stat.S_ISFIFO(mode): return stat.S_IFIFO
+    if stat.S_ISSOCK(mode): return stat.S_IFSOCK
+    return mode
+
+def listdir(path, stat=False):
+    '''listdir(path, stat=False) -> list_of_tuples
+
+    Return a sorted list containing information about the entries
+    in the directory.
+
+    If stat is True, each element is a 3-tuple:
+
+      (name, type, stat object)
+
+    Otherwise, each element is a 2-tuple:
+
+      (name, type)
+    '''
+    result = []
+    prefix = path + os.sep
+    names = os.listdir(path)
+    names.sort()
+    for fn in names:
+        st = os.lstat(prefix + fn)
+        if stat:
+            result.append((fn, _mode_to_kind(st.st_mode), st))
+        else:
+            result.append((fn, _mode_to_kind(st.st_mode)))
+    return result
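
The pure-Python module mirrors the interface of the C osutil.listdir() added above, so callers can use either implementation interchangeably. A brief usage sketch:

    import stat
    from mercurial import osutil

    for name, kind in osutil.listdir('.'):
        if kind == stat.S_IFDIR:
            print name + '/'              # a subdirectory
        elif kind == stat.S_IFLNK:
            print name + ' (symlink)'
        else:
            print name
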
--- a/mercurial/patch.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/patch.py	Wed Feb 06 19:57:52 2008 -0800
@@ -9,7 +9,7 @@
 from i18n import _
 from node import *
 import base85, cmdutil, mdiff, util, context, revlog, diffhelpers
-import cStringIO, email.Parser, os, popen2, re, sha
+import cStringIO, email.Parser, os, popen2, re, sha, errno
 import sys, tempfile, zlib
 
 class PatchError(Exception):
@@ -59,6 +59,7 @@
 
         subject = msg['Subject']
         user = msg['From']
+        gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
         # should try to parse msg['Date']
         date = None
         nodeid = None
@@ -111,7 +112,7 @@
                             nodeid = line[10:]
                         elif line.startswith("# Parent "):
                             parents.append(line[10:])
-                    elif line == '---' and 'git-send-email' in msg['X-Mailer']:
+                    elif line == '---' and gitsendmail:
                         ignoretext = True
                     if not line.startswith('# ') and not ignoretext:
                         cfp.write(line)
@@ -142,7 +143,7 @@
 GP_FILTER = 1 << 1  # there's some copy/rename operation
 GP_BINARY = 1 << 2  # there's a binary patch
 
-def readgitpatch(fp, firstline):
+def readgitpatch(fp, firstline=None):
     """extract git-style metadata about patches from <patchname>"""
     class gitpatch:
         "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
@@ -151,12 +152,12 @@
             self.oldpath = None
             self.mode = None
             self.op = 'MODIFY'
-            self.copymod = False
             self.lineno = 0
             self.binary = False
 
     def reader(fp, firstline):
-        yield firstline
+        if firstline is not None:
+            yield firstline
         for line in fp:
             yield line
 
@@ -181,7 +182,6 @@
         elif gp:
             if line.startswith('--- '):
                 if gp.op in ('COPY', 'RENAME'):
-                    gp.copymod = True
                     dopatch |= GP_FILTER
                 gitpatches.append(gp)
                 gp = None
@@ -201,9 +201,9 @@
                 gp.op = 'DELETE'
             elif line.startswith('new file mode '):
                 gp.op = 'ADD'
-                gp.mode = int(line.rstrip()[-3:], 8)
+                gp.mode = int(line.rstrip()[-6:], 8)
             elif line.startswith('new mode '):
-                gp.mode = int(line.rstrip()[-3:], 8)
+                gp.mode = int(line.rstrip()[-6:], 8)
             elif line.startswith('GIT binary patch'):
                 dopatch |= GP_BINARY
                 gp.binary = True
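
Taking the last six characters instead of three keeps the file-type bits of a git mode line rather than just the permission bits, which among other things lets symlink entries (mode 120000) be recognised:

    line = 'new file mode 100755\n'
    int(line.rstrip()[-3:], 8)    # old parse: 0755, permissions only
    int(line.rstrip()[-6:], 8)    # new parse: 0100755, type bits included
    int('120000'[-3:], 8)         # a symlink used to parse as 0 ...
    int('120000'[-6:], 8)         # ... and now as 0120000 (S_IFLNK)
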
@@ -249,7 +249,7 @@
     fuzz = False
     if cwd:
         args.append('-d %s' % util.shellquote(cwd))
-    fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
+    fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
                                        util.shellquote(patchname)))
 
     for line in fp:
@@ -278,10 +278,13 @@
                          util.explain_exit(code)[0])
     return fuzz
 
-def internalpatch(patchname, ui, strip, cwd, files):
-    """use builtin patch to apply <patchname> to the working directory.
+def internalpatch(patchobj, ui, strip, cwd, files={}):
+    """use builtin patch to apply <patchobj> to the working directory.
     returns whether patch was applied with fuzz factor."""
-    fp = file(patchname, 'rb')
+    try:
+        fp = file(patchobj, 'rb')
+    except TypeError:
+        fp = patchobj
     if cwd:
         curdir = os.getcwd()
         os.chdir(cwd)
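
Because internalpatch() now falls back to treating its first argument as an already-open file-like object when file() raises TypeError, an in-memory patch can be applied without writing it to disk first. A hypothetical call (ui and repo are assumed to exist elsewhere; the patch content is made up):

    from cStringIO import StringIO

    lines = ["--- a/hello.txt\n", "+++ b/hello.txt\n",
             "@@ -1,1 +1,1 @@\n", "-hello\n", "+hello, world\n"]
    patchtext = "".join(lines)

    # same effect as passing a file name, minus the temporary file
    fuzz = internalpatch(StringIO(patchtext), ui, strip=1, cwd=repo.root)
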
@@ -299,25 +302,27 @@
 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
 
 class patchfile:
-    def __init__(self, ui, fname):
+    def __init__(self, ui, fname, missing=False):
         self.fname = fname
         self.ui = ui
-        try:
-            fp = file(fname, 'rb')
-            self.lines = fp.readlines()
-            self.exists = True
-        except IOError:
+        self.lines = []
+        self.exists = False
+        self.missing = missing
+        if not missing:
+            try:
+                fp = file(fname, 'rb')
+                self.lines = fp.readlines()
+                self.exists = True
+            except IOError:
+                pass
+        else:
+            self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
+
+        if not self.exists:
             dirname = os.path.dirname(fname)
             if dirname and not os.path.isdir(dirname):
-                dirs = dirname.split(os.path.sep)
-                d = ""
-                for x in dirs:
-                    d = os.path.join(d, x)
-                    if not os.path.isdir(d):
-                        os.mkdir(d)
-            self.lines = []
-            self.exists = False
-            
+                os.makedirs(dirname)
+
         self.hash = {}
         self.dirty = 0
         self.offset = 0
@@ -346,7 +351,7 @@
             vala = abs(a - linenum)
             valb = abs(b - linenum)
             return cmp(vala, valb)
-            
+
         try:
             cand = self.hash[l]
         except:
@@ -354,7 +359,7 @@
 
         if len(cand) > 1:
             # resort our list of potentials forward then back.
-            cand.sort(cmp=sorter)
+            cand.sort(sorter)
         return cand
 
     def hashlines(self):
@@ -399,11 +404,13 @@
             st = None
             try:
                 st = os.lstat(dest)
-                if st.st_nlink > 1:
-                    os.unlink(dest)
-            except: pass
+            except OSError, inst:
+                if inst.errno != errno.ENOENT:
+                    raise
+            if st and st.st_nlink > 1:
+                os.unlink(dest)
             fp = file(dest, 'wb')
-            if st:
+            if st and st.st_nlink > 1:
                 os.chmod(dest, st.st_mode)
             fp.writelines(self.lines)
             fp.close()
@@ -422,6 +429,10 @@
         if reverse:
             h.reverse()
 
+        if self.missing:
+            self.rej.append(h)
+            return -1
+
         if self.exists and h.createfile():
             self.ui.warn(_("file %s already exists\n") % self.fname)
             self.rej.append(h)
@@ -494,7 +505,7 @@
         return -1
 
 class hunk:
-    def __init__(self, desc, num, lr, context):
+    def __init__(self, desc, num, lr, context, gitpatch=None):
         self.number = num
         self.desc = desc
         self.hunk = [ desc ]
@@ -504,6 +515,7 @@
             self.read_context_hunk(lr)
         else:
             self.read_unified_hunk(lr)
+        self.gitpatch = gitpatch
 
     def read_unified_hunk(self, lr):
         m = unidesc.match(self.desc)
@@ -658,10 +670,12 @@
         return len(self.a) == self.lena and len(self.b) == self.lenb
 
     def createfile(self):
-        return self.starta == 0 and self.lena == 0
+        create = self.gitpatch is None or self.gitpatch.op == 'ADD'
+        return self.starta == 0 and self.lena == 0 and create
 
     def rmfile(self):
-        return self.startb == 0 and self.lenb == 0
+        remove = self.gitpatch is None or self.gitpatch.op == 'DELETE'
+        return self.startb == 0 and self.lenb == 0 and remove
 
     def fuzzit(self, l, fuzz, toponly):
         # this removes context lines from the top and bottom of list 'l'.  It
@@ -702,7 +716,7 @@
 
     def old(self, fuzz=0, toponly=False):
         return self.fuzzit(self.a, fuzz, toponly)
-        
+
     def newctrl(self):
         res = []
         for x in self.hunk:
@@ -762,7 +776,7 @@
 
 def parsefilename(str):
     # --- filename \t|space stuff
-    s = str[4:]
+    s = str[4:].rstrip('\r\n')
     i = s.find('\t')
     if i < 0:
         i = s.find(' ')
@@ -791,31 +805,32 @@
     nulla = afile_orig == "/dev/null"
     nullb = bfile_orig == "/dev/null"
     afile = pathstrip(afile_orig, strip)
-    gooda = os.path.exists(afile) and not nulla
+    gooda = not nulla and os.path.exists(afile)
     bfile = pathstrip(bfile_orig, strip)
     if afile == bfile:
         goodb = gooda
     else:
-        goodb = os.path.exists(bfile) and not nullb
+        goodb = not nullb and os.path.exists(bfile)
     createfunc = hunk.createfile
     if reverse:
         createfunc = hunk.rmfile
-    if not goodb and not gooda and not createfunc():
-        raise PatchError(_("unable to find %s or %s for patching") %
-                         (afile, bfile))
-    if gooda and goodb:
-        fname = bfile
-        if afile in bfile:
+    missing = not goodb and not gooda and not createfunc()
+    fname = None
+    if not missing:
+        if gooda and goodb:
+            fname = (afile in bfile) and afile or bfile
+        elif gooda:
             fname = afile
-    elif gooda:
-        fname = afile
-    elif not nullb:
-        fname = bfile
-        if afile in bfile:
+
+    if not fname:
+        if not nullb:
+            fname = (afile in bfile) and afile or bfile
+        elif not nulla:
             fname = afile
-    elif not nulla:
-        fname = afile
-    return fname
+        else:
+            raise PatchError(_("undefined source and destination files"))
+
+    return fname, missing
 
 class linereader:
     # simple class to allow pushing lines back into the input stream
@@ -833,14 +848,16 @@
             return l
         return self.fp.readline()
 
-def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
-              rejmerge=None, updatedir=None):
-    """reads a patch from fp and tries to apply it.  The dict 'changed' is
-       filled in with all of the filenames changed by the patch.  Returns 0
-       for a clean patch, -1 if any rejects were found and 1 if there was
-       any fuzz.""" 
+def iterhunks(ui, fp, sourcefile=None):
+    """Read a patch and yield the following events:
+    - ("file", afile, bfile, firsthunk): select a new target file.
+    - ("hunk", hunk): a new hunk is ready to be applied, follows a
+    "file" event.
+    - ("git", gitchanges): current diff is in git format, gitchanges
+    is the list of gitpatch records for the diff. Unique event.
+    """
 
-    def scangitpatch(fp, firstline, cwd=None):
+    def scangitpatch(fp, firstline):
         '''git patches can modify a file, then copy that file to
         a new file, but expect the source to be the unmodified form.
         So we scan the patch looking for that case so we can do
@@ -853,28 +870,23 @@
             fp = cStringIO.StringIO(fp.read())
 
         (dopatch, gitpatches) = readgitpatch(fp, firstline)
-        for gp in gitpatches:
-            if gp.copymod:
-                copyfile(gp.oldpath, gp.path, basedir=cwd)
-
         fp.seek(pos)
 
         return fp, dopatch, gitpatches
 
+    changed = {}
     current_hunk = None
-    current_file = None
     afile = ""
     bfile = ""
     state = None
     hunknum = 0
-    rejects = 0
+    emitfile = False
 
     git = False
     gitre = re.compile('diff --git (a/.*) (b/.*)')
 
     # our states
     BFILE = 1
-    err = 0
     context = None
     lr = linereader(fp)
     dopatch = True
@@ -888,11 +900,7 @@
         if current_hunk:
             if x.startswith('\ '):
                 current_hunk.fix_newline()
-            ret = current_file.apply(current_hunk, reverse)
-            if ret >= 0:
-                changed.setdefault(current_file.fname, (None, None))
-                if ret > 0:
-                    err = 1
+            yield 'hunk', current_hunk
             current_hunk = None
             gitworkdone = False
         if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
@@ -900,29 +908,22 @@
             try:
                 if context == None and x.startswith('***************'):
                     context = True
-                current_hunk = hunk(x, hunknum + 1, lr, context)
+                gpatch = changed.get(bfile[2:], (None, None))[1]
+                current_hunk = hunk(x, hunknum + 1, lr, context, gpatch)
             except PatchError, err:
                 ui.debug(err)
                 current_hunk = None
                 continue
             hunknum += 1
-            if not current_file:
-                if sourcefile:
-                    current_file = patchfile(ui, sourcefile)
-                else:
-                    current_file = selectfile(afile, bfile, current_hunk,
-                                              strip, reverse)
-                    current_file = patchfile(ui, current_file)
+            if emitfile:
+                emitfile = False
+                yield 'file', (afile, bfile, current_hunk)
         elif state == BFILE and x.startswith('GIT binary patch'):
             current_hunk = binhunk(changed[bfile[2:]][1])
-            if not current_file:
-                if sourcefile:
-                    current_file = patchfile(ui, sourcefile)
-                else:
-                    current_file = selectfile(afile, bfile, current_hunk,
-                                              strip, reverse)
-                    current_file = patchfile(ui, current_file)
             hunknum += 1
+            if emitfile:
+                emitfile = False
+                yield 'file', (afile, bfile, current_hunk)
             current_hunk.extract(fp)
         elif x.startswith('diff --git'):
             # check for git diff, scanning the whole patch file if needed
@@ -932,6 +933,7 @@
                 if not git:
                     git = True
                     fp, dopatch, gitpatches = scangitpatch(fp, x)
+                    yield 'git', gitpatches
                     for gp in gitpatches:
                         changed[gp.path] = (gp.op, gp)
                 # else error?
@@ -968,36 +970,79 @@
             bfile = parsefilename(l2)
 
         if newfile:
-            if current_file:
-                current_file.close()
-                if rejmerge:
-                    rejmerge(current_file)
-                rejects += len(current_file.rej)
+            emitfile = True
             state = BFILE
-            current_file = None
             hunknum = 0
     if current_hunk:
         if current_hunk.complete():
+            yield 'hunk', current_hunk
+        else:
+            raise PatchError(_("malformed patch %s %s") % (afile,
+                             current_hunk.desc))
+
+    if hunknum == 0 and dopatch and not gitworkdone:
+        raise NoHunks
+
+def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
+              rejmerge=None, updatedir=None):
+    """reads a patch from fp and tries to apply it.  The dict 'changed' is
+       filled in with all of the filenames changed by the patch.  Returns 0
+       for a clean patch, -1 if any rejects were found and 1 if there was
+       any fuzz."""
+
+    rejects = 0
+    err = 0
+    current_file = None
+    gitpatches = None
+
+    def closefile():
+        if not current_file:
+            return 0
+        current_file.close()
+        if rejmerge:
+            rejmerge(current_file)
+        return len(current_file.rej)
+
+    for state, values in iterhunks(ui, fp, sourcefile):
+        if state == 'hunk':
+            if not current_file:
+                continue
+            current_hunk = values
             ret = current_file.apply(current_hunk, reverse)
             if ret >= 0:
                 changed.setdefault(current_file.fname, (None, None))
                 if ret > 0:
                     err = 1
+        elif state == 'file':
+            rejects += closefile()
+            afile, bfile, first_hunk = values
+            try:
+                if sourcefile:
+                    current_file = patchfile(ui, sourcefile)
+                else:
+                    current_file, missing = selectfile(afile, bfile, first_hunk,
+                                            strip, reverse)
+                    current_file = patchfile(ui, current_file, missing)
+            except PatchError, err:
+                ui.warn(str(err) + '\n')
+                current_file, current_hunk = None, None
+                rejects += 1
+                continue
+        elif state == 'git':
+            gitpatches = values
+            for gp in gitpatches:
+                if gp.op in ('COPY', 'RENAME'):
+                    copyfile(gp.oldpath, gp.path)
+                changed[gp.path] = (gp.op, gp)
         else:
-            fname = current_file and current_file.fname or None
-            raise PatchError(_("malformed patch %s %s") % (fname,
-                             current_hunk.desc))
-    if current_file:
-        current_file.close()
-        if rejmerge:
-            rejmerge(current_file)
-        rejects += len(current_file.rej)
-    if updatedir and git:
+            raise util.Abort(_('unsupported parser state: %s') % state)
+
+    rejects += closefile()
+
+    if updatedir and gitpatches:
         updatedir(gitpatches)
     if rejects:
         return -1
-    if hunknum == 0 and dopatch and not gitworkdone:
-        raise NoHunks
     return err
 
 def diffopts(ui, opts={}, untrusted=False):
@@ -1027,15 +1072,13 @@
     for f in patches:
         ctype, gp = patches[f]
         if ctype == 'RENAME':
-            copies.append((gp.oldpath, gp.path, gp.copymod))
+            copies.append((gp.oldpath, gp.path))
             removes[gp.oldpath] = 1
         elif ctype == 'COPY':
-            copies.append((gp.oldpath, gp.path, gp.copymod))
+            copies.append((gp.oldpath, gp.path))
         elif ctype == 'DELETE':
             removes[gp.path] = 1
-    for src, dst, after in copies:
-        if not after:
-            copyfile(src, dst, repo.root)
+    for src, dst in copies:
         repo.copy(src, dst)
     removes = removes.keys()
     if removes:
@@ -1044,13 +1087,17 @@
     for f in patches:
         ctype, gp = patches[f]
         if gp and gp.mode:
-            x = gp.mode & 0100 != 0
+            flags = ''
+            if gp.mode & 0100:
+                flags = 'x'
+            elif gp.mode & 020000:
+                flags = 'l'
             dst = os.path.join(repo.root, gp.path)
             # patch won't create empty files
             if ctype == 'ADD' and not os.path.exists(dst):
-                repo.wwrite(gp.path, '', x and 'x' or '')
+                repo.wwrite(gp.path, '', flags)
             else:
-                util.set_exec(dst, x)
+                util.set_flags(dst, flags)
     cmdutil.addremove(repo, cfiles)
     files = patches.keys()
     files.extend([r for r in removes if r not in files])
@@ -1058,7 +1105,7 @@
 
     return files
 
-def b85diff(fp, to, tn):
+def b85diff(to, tn):
     '''print base85-encoded binary diff'''
     def gitindex(text):
         if not text:
@@ -1142,24 +1189,30 @@
     if node2:
         ctx2 = context.changectx(repo, node2)
         execf2 = ctx2.manifest().execf
+        linkf2 = ctx2.manifest().linkf
     else:
         ctx2 = context.workingctx(repo)
         execf2 = util.execfunc(repo.root, None)
+        linkf2 = util.linkfunc(repo.root, None)
         if execf2 is None:
-            execf2 = ctx2.parents()[0].manifest().copy().execf
+            mc = ctx2.parents()[0].manifest().copy()
+            execf2 = mc.execf
+            linkf2 = mc.linkf
 
     # returns False if there was no rename between ctx1 and ctx2
     # returns None if the file was created between ctx1 and ctx2
     # returns the (file, node) present in ctx1 that was renamed to f in ctx2
-    def renamed(f):
-        startrev = ctx1.rev()
-        c = ctx2
+    # This will only really work if c1 is the Nth 1st parent of c2.
+    def renamed(c1, c2, man, f):
+        startrev = c1.rev()
+        c = c2
         crev = c.rev()
         if crev is None:
             crev = repo.changelog.count()
         orig = f
+        files = (f,)
         while crev > startrev:
-            if f in c.files():
+            if f in files:
                 try:
                     src = getfilectx(f, c).renamed()
                 except revlog.LookupError:
@@ -1169,7 +1222,8 @@
             crev = c.parents()[0].rev()
             # try to reuse
             c = getctx(crev)
-        if f not in man1:
+            files = c.files()
+        if f not in man:
             return None
         if f == orig:
             return False
@@ -1183,11 +1237,27 @@
 
     if opts.git:
         copied = {}
-        for f in added:
-            src = renamed(f)
+        c1, c2 = ctx1, ctx2
+        files = added
+        man = man1
+        if node2 and ctx1.rev() >= ctx2.rev():
+            # renamed() starts at c2 and walks back in history until c1.
+            # Since ctx1.rev() >= ctx2.rev(), invert ctx2 and ctx1 to
+            # detect (inverted) copies.
+            c1, c2 = ctx2, ctx1
+            files = removed
+            man = ctx2.manifest()
+        for f in files:
+            src = renamed(c1, c2, man, f)
             if src:
                 copied[f] = src
-        srcs = [x[1] for x in copied.items()]
+        if ctx1 == c2:
+            # invert the copied dict
+            copied = dict([(v, k) for (k, v) in copied.iteritems()])
+        # If we've renamed file foo to bar (copied['bar'] = 'foo'),
+        # avoid showing a diff for foo if we're going to show
+        # the rename to bar.
+        srcs = [x[1] for x in copied.iteritems() if x[0] in added]
 
     all = modified + added + removed
     all.sort()
@@ -1202,20 +1272,20 @@
             to = getfilectx(f, ctx1).data()
         if f not in removed:
             tn = getfilectx(f, ctx2).data()
+        a, b = f, f
         if opts.git:
-            def gitmode(x):
-                return x and '100755' or '100644'
+            def gitmode(x, l):
+                return l and '120000' or (x and '100755' or '100644')
             def addmodehdr(header, omode, nmode):
                 if omode != nmode:
                     header.append('old mode %s\n' % omode)
                     header.append('new mode %s\n' % nmode)
 
-            a, b = f, f
             if f in added:
-                mode = gitmode(execf2(f))
+                mode = gitmode(execf2(f), linkf2(f))
                 if f in copied:
                     a = copied[f]
-                    omode = gitmode(man1.execf(a))
+                    omode = gitmode(man1.execf(a), man1.linkf(a))
                     addmodehdr(header, omode, mode)
                     if a in removed and a not in gone:
                         op = 'rename'
@@ -1233,11 +1303,11 @@
                 if f in srcs:
                     dodiff = False
                 else:
-                    mode = gitmode(man1.execf(f))
+                    mode = gitmode(man1.execf(f), man1.linkf(f))
                     header.append('deleted file mode %s\n' % mode)
             else:
-                omode = gitmode(man1.execf(f))
-                nmode = gitmode(execf2(f))
+                omode = gitmode(man1.execf(f), man1.linkf(f))
+                nmode = gitmode(execf2(f), linkf2(f))
                 addmodehdr(header, omode, nmode)
                 if util.binary(to) or util.binary(tn):
                     dodiff = 'binary'
@@ -1245,12 +1315,12 @@
             header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
         if dodiff:
             if dodiff == 'binary':
-                text = b85diff(fp, to, tn)
+                text = b85diff(to, tn)
             else:
                 text = mdiff.unidiff(to, date1,
                                     # ctx2 date may be dynamic
                                     tn, util.datestr(ctx2.date()),
-                                    f, r, opts=opts)
+                                    a, b, r, opts=opts)
             if text or len(header) > 1:
                 fp.write(''.join(header))
             fp.write(text)
@@ -1303,7 +1373,8 @@
     try:
         p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
         try:
-            for line in patchlines: print >> p.tochild, line
+            for line in patchlines:
+                p.tochild.write(line + "\n")
             p.tochild.close()
             if p.wait(): return
             fp = os.fdopen(fd, 'r')
@@ -1312,7 +1383,6 @@
             last = stat.pop()
             stat.insert(0, last)
             stat = ''.join(stat)
-            if stat.startswith('0 files'): raise ValueError
             return stat
         except: raise
     finally:
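
The refactoring above splits patch parsing from patch application: iterhunks() reads the patch and yields 'file', 'hunk' and 'git' events, and the new applydiff() merely reacts to them. As an illustration only (not part of this changeset), a minimal consumer of that event stream, assuming the patched mercurial/patch.py is importable, might look like:

# Illustrative sketch, not part of the changeset: count hunks per target
# file by consuming the iterhunks() event stream introduced above.
from mercurial import patch

def count_hunks(ui, fp):
    counts = {}
    current = None
    for state, values in patch.iterhunks(ui, fp):
        if state == 'file':
            # values is (afile, bfile, first_hunk); remember the target name
            afile, bfile, first_hunk = values
            current = bfile
            counts.setdefault(current, 0)
        elif state == 'hunk':
            if current is not None:
                counts[current] += 1
        elif state == 'git':
            # values is the list of gitpatch records of a git-format diff
            pass
    return counts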
--- a/mercurial/repair.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/repair.py	Wed Feb 06 19:57:52 2008 -0800
@@ -6,71 +6,86 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-import changegroup, revlog, os, commands
+import changegroup, os
+from node import *
 
-def strip(ui, repo, rev, backup="all"):
-    def limitheads(chlog, stop):
-        """return the list of all nodes that have no children"""
-        p = {}
-        h = []
-        stoprev = 0
-        if stop in chlog.nodemap:
-            stoprev = chlog.rev(stop)
+def _limitheads(cl, stoprev):
+    """return the list of all revs >= stoprev that have no children"""
+    seen = {}
+    heads = []
+
+    for r in xrange(cl.count() - 1, stoprev - 1, -1):
+        if r not in seen:
+            heads.append(r)
+        for p in cl.parentrevs(r):
+            seen[p] = 1
+    return heads
 
-        for r in xrange(chlog.count() - 1, -1, -1):
-            n = chlog.node(r)
-            if n not in p:
-                h.append(n)
-            if n == stop:
-                break
-            if r < stoprev:
-                break
-            for pn in chlog.parents(n):
-                p[pn] = 1
-        return h
+def _bundle(repo, bases, heads, node, suffix, extranodes=None):
+    """create a bundle with the specified revisions as a backup"""
+    cg = repo.changegroupsubset(bases, heads, 'strip', extranodes)
+    backupdir = repo.join("strip-backup")
+    if not os.path.isdir(backupdir):
+        os.mkdir(backupdir)
+    name = os.path.join(backupdir, "%s-%s" % (short(node), suffix))
+    repo.ui.warn("saving bundle to %s\n" % name)
+    return changegroup.writebundle(cg, name, "HG10BZ")
 
-    def bundle(repo, bases, heads, rev, suffix):
-        cg = repo.changegroupsubset(bases, heads, 'strip')
-        backupdir = repo.join("strip-backup")
-        if not os.path.isdir(backupdir):
-            os.mkdir(backupdir)
-        name = os.path.join(backupdir, "%s-%s" % (revlog.short(rev), suffix))
-        ui.warn("saving bundle to %s\n" % name)
-        return changegroup.writebundle(cg, name, "HG10BZ")
+def _collectfiles(repo, striprev):
+    """find out the filelogs affected by the strip"""
+    files = {}
+
+    for x in xrange(striprev, repo.changelog.count()):
+        for name in repo.changectx(x).files():
+            if name in files:
+                continue
+            files[name] = 1
+
+    files = files.keys()
+    files.sort()
+    return files
 
-    def stripall(revnum):
-        mm = repo.changectx(rev).manifest()
-        seen = {}
+def _collectextranodes(repo, files, link):
+    """return the nodes that have to be saved before the strip"""
+    def collectone(revlog):
+        extra = []
+        startrev = count = revlog.count()
+        # find the truncation point of the revlog
+        for i in xrange(0, count):
+            node = revlog.node(i)
+            lrev = revlog.linkrev(node)
+            if lrev >= link:
+                startrev = i + 1
+                break
+
+        # see if any revision after that point has a linkrev less than link
+        # (we have to manually save these guys)
+        for i in xrange(startrev, count):
+            node = revlog.node(i)
+            lrev = revlog.linkrev(node)
+            if lrev < link:
+                extra.append((node, cl.node(lrev)))
 
-        for x in xrange(revnum, repo.changelog.count()):
-            for f in repo.changectx(x).files():
-                if f in seen:
-                    continue
-                seen[f] = 1
-                if f in mm:
-                    filerev = mm[f]
-                else:
-                    filerev = 0
-                seen[f] = filerev
-        # we go in two steps here so the strip loop happens in a
-        # sensible order.  When stripping many files, this helps keep
-        # our disk access patterns under control.
-        seen_list = seen.keys()
-        seen_list.sort()
-        for f in seen_list:
-            ff = repo.file(f)
-            filerev = seen[f]
-            if filerev != 0:
-                if filerev in ff.nodemap:
-                    filerev = ff.rev(filerev)
-                else:
-                    filerev = 0
-            ff.strip(filerev, revnum)
+        return extra
 
-    chlog = repo.changelog
+    extranodes = {}
+    cl = repo.changelog
+    extra = collectone(repo.manifest)
+    if extra:
+        extranodes[1] = extra
+    for fname in files:
+        f = repo.file(fname)
+        extra = collectone(f)
+        if extra:
+            extranodes[fname] = extra
+
+    return extranodes
+
+def strip(ui, repo, node, backup="all"):
+    cl = repo.changelog
     # TODO delete the undo files, and handle undo of merge sets
-    pp = chlog.parents(rev)
-    revnum = chlog.rev(rev)
+    pp = cl.parents(node)
+    striprev = cl.rev(node)
 
     # save is a list of all the branches we are truncating away
     # that we actually want to keep.  changegroup will be used
@@ -78,7 +93,7 @@
     saveheads = []
     savebases = {}
 
-    heads = limitheads(chlog, rev)
+    heads = [cl.node(r) for r in _limitheads(cl, striprev)]
     seen = {}
 
     # search through all the heads, finding those where the revision
@@ -89,39 +104,48 @@
         n = h
         while True:
             seen[n] = 1
-            pp = chlog.parents(n)
-            if pp[1] != revlog.nullid:
+            pp = cl.parents(n)
+            if pp[1] != nullid:
                 for p in pp:
-                    if chlog.rev(p) > revnum and p not in seen:
+                    if cl.rev(p) > striprev and p not in seen:
                         heads.append(p)
-            if pp[0] == revlog.nullid:
+            if pp[0] == nullid:
                 break
-            if chlog.rev(pp[0]) < revnum:
+            if cl.rev(pp[0]) < striprev:
                 break
             n = pp[0]
-            if n == rev:
+            if n == node:
                 break
-        r = chlog.reachable(h, rev)
-        if rev not in r:
+        r = cl.reachable(h, node)
+        if node not in r:
             saveheads.append(h)
             for x in r:
-                if chlog.rev(x) > revnum:
+                if cl.rev(x) > striprev:
                     savebases[x] = 1
 
+    files = _collectfiles(repo, striprev)
+
+    extranodes = _collectextranodes(repo, files, striprev)
+
     # create a changegroup for all the branches we need to keep
     if backup == "all":
-        bundle(repo, [rev], chlog.heads(), rev, 'backup')
-    if saveheads:
-        chgrpfile = bundle(repo, savebases.keys(), saveheads, rev, 'temp')
-
-    stripall(revnum)
+        _bundle(repo, [node], cl.heads(), node, 'backup')
+    if saveheads or extranodes:
+        chgrpfile = _bundle(repo, savebases.keys(), saveheads, node, 'temp',
+                            extranodes)
 
-    change = chlog.read(rev)
-    chlog.strip(revnum, revnum)
-    repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
-    if saveheads:
+    cl.strip(striprev)
+    repo.manifest.strip(striprev)
+    for name in files:
+        f = repo.file(name)
+        f.strip(striprev)
+
+    if saveheads or extranodes:
         ui.status("adding branch\n")
-        commands.unbundle(ui, repo, "file:%s" % chgrpfile, update=False)
+        f = open(chgrpfile, "rb")
+        gen = changegroup.readbundle(f, chgrpfile)
+        repo.addchangegroup(gen, 'strip', 'bundle:' + chgrpfile, True)
+        f.close()
         if backup != "strip":
             os.unlink(chgrpfile)
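
The rewritten strip code above relies on _limitheads() to collect every revision at or after the strip point that has no children. A standalone sketch of the same walk (illustrative only; it runs over a plain parent table instead of a changelog):

# Illustrative sketch: the "revs >= stoprev with no children" walk performed
# by _limitheads() above, over parents[r] = (p1, p2) pairs with -1 meaning
# "no parent" instead of a real changelog.
def limitheads(parents, stoprev):
    seen = {}
    heads = []
    for r in range(len(parents) - 1, stoprev - 1, -1):
        if r not in seen:
            heads.append(r)
        for p in parents[r]:
            seen[p] = 1
    return heads

# A tiny history: 0 <- 1 <- 2 and a branch 1 <- 3
print(limitheads([(-1, -1), (0, -1), (1, -1), (1, -1)], 1))  # [3, 2]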
 
--- a/mercurial/repo.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/repo.py	Wed Feb 06 19:57:52 2008 -0800
@@ -6,19 +6,31 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
+from i18n import _
+
 class RepoError(Exception):
     pass
 
+class NoCapability(RepoError):
+    pass
+
 class repository(object):
     def capable(self, name):
         '''tell whether repo supports named capability.
         return False if not supported.
         if boolean capability, return True.
         if string capability, return string.'''
+        if name in self.capabilities:
+            return True
         name_eq = name + '='
         for cap in self.capabilities:
-            if name == cap:
-                return True
             if cap.startswith(name_eq):
                 return cap[len(name_eq):]
         return False
+
+    def requirecap(self, name, purpose):
+        '''raise an exception if the given capability is not present'''
+        if not self.capable(name):
+            raise NoCapability(_('cannot %s; remote repository does not '
+                                 'support the %r capability') %
+                               (purpose, name))
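
The capability helpers added above first try an exact match (boolean capability), then a 'name=value' prefix match (string capability), and requirecap() turns a missing capability into a NoCapability error. A hypothetical subclass illustrates the behaviour (the capability strings below are made up for the example):

# Illustrative sketch: exercising repository.capable()/requirecap() with a
# hypothetical set of capabilities.
from mercurial import repo

class fakerepo(repo.repository):
    capabilities = ('lookup', 'unbundle=HG10GZ,HG10BZ,HG10UN')

r = fakerepo()
print(r.capable('lookup'))    # True: exact (boolean) capability
print(r.capable('unbundle'))  # 'HG10GZ,HG10BZ,HG10UN': string capability
print(r.capable('pushkey'))   # False: not advertised
r.requirecap('lookup', 'look up remote revisions')  # silently passes
# r.requirecap('pushkey', ...) would raise repo.NoCapability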
--- a/mercurial/revlog.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/revlog.py	Wed Feb 06 19:57:52 2008 -0800
@@ -15,6 +15,12 @@
 import binascii, changegroup, errno, ancestor, mdiff, os
 import sha, struct, util, zlib
 
+_pack = struct.pack
+_unpack = struct.unpack
+_compress = zlib.compress
+_decompress = zlib.decompress
+_sha = sha.new
+
 # revlog flags
 REVLOGV0 = 0
 REVLOGNG = 1
@@ -25,12 +31,15 @@
 
 class RevlogError(Exception):
     pass
+
 class LookupError(RevlogError):
-    pass
+    def __init__(self, name, message=None):
+        if message is None:
+            message = _('not found: %s') % name
+        RevlogError.__init__(self, message)
+        self.name = name
 
 def getoffset(q):
-    if q & 0xFFFF:
-        raise RevlogError(_('incompatible revision flag %x') % q)
     return int(q >> 16)
 
 def gettype(q):
@@ -48,7 +57,7 @@
     """
     l = [p1, p2]
     l.sort()
-    s = sha.new(l[0])
+    s = _sha(l[0])
     s.update(l[1])
     s.update(text)
     return s.digest()
@@ -57,12 +66,26 @@
     """ generate a possibly-compressed representation of text """
     if not text:
         return ("", text)
-    if len(text) < 44:
-        if text[0] == '\0':
-            return ("", text)
-        return ('u', text)
-    bin = zlib.compress(text)
-    if len(bin) > len(text):
+    l = len(text)
+    bin = None
+    if l < 44:
+        pass
+    elif l > 1000000:
+        # zlib makes an internal copy, thus doubling memory usage for
+        # large files, so let's do this in pieces
+        z = zlib.compressobj()
+        p = []
+        pos = 0
+        while pos < l:
+            pos2 = pos + 2**20
+            p.append(z.compress(text[pos:pos2]))
+            pos = pos2
+        p.append(z.flush())
+        if sum(map(len, p)) < l:
+            bin = "".join(p)
+    else:
+        bin = _compress(text)
+    if bin is None or len(bin) > l:
         if text[0] == '\0':
             return ("", text)
         return ('u', text)
@@ -76,7 +99,7 @@
     if t == '\0':
         return bin
     if t == 'x':
-        return zlib.decompress(bin)
+        return _decompress(bin)
     if t == 'u':
         return bin[1:]
     raise RevlogError(_("unknown compression type %r") % t)
@@ -89,8 +112,6 @@
     # lazyparser is not safe to use on windows if win32 extensions not
     # available. It keeps the file handle open, which makes it impossible
     # to break hardlinks on local cloned repos.
-    safe_to_use = os.name != 'nt' or (not util.is_win_9x() and
-                                      hasattr(util, 'win32api'))
 
     def __init__(self, dataf, size):
         self.dataf = dataf
@@ -236,16 +257,15 @@
         self.p.loadindex(pos)
         return self.p.index[pos]
     def __getitem__(self, pos):
-        return struct.unpack(indexformatng,
-                             self.p.index[pos] or self.load(pos))
+        return _unpack(indexformatng, self.p.index[pos] or self.load(pos))
     def __setitem__(self, pos, item):
-        self.p.index[pos] = struct.pack(indexformatng, *item)
+        self.p.index[pos] = _pack(indexformatng, *item)
     def __delitem__(self, pos):
         del self.p.index[pos]
     def insert(self, pos, e):
-        self.p.index.insert(pos, struct.pack(indexformatng, *e))
+        self.p.index.insert(pos, _pack(indexformatng, *e))
     def append(self, e):
-        self.p.index.append(struct.pack(indexformatng, *e))
+        self.p.index.append(_pack(indexformatng, *e))
 
 class lazymap(object):
     """a lazy version of the node map"""
@@ -268,7 +288,7 @@
                 self.p.loadindex(i)
                 ret = self.p.index[i]
             if isinstance(ret, str):
-                ret = struct.unpack(indexformatng, ret)
+                ret = _unpack(indexformatng, ret)
             yield ret[7]
     def __getitem__(self, key):
         try:
@@ -301,20 +321,20 @@
         while off + s <= l:
             cur = data[off:off + s]
             off += s
-            e = struct.unpack(indexformatv0, cur)
+            e = _unpack(indexformatv0, cur)
             # transform to revlogv1 format
             e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
-                  nodemap[e[4]], nodemap[e[5]], e[6])
+                  nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
             index.append(e2)
             nodemap[e[6]] = n
             n += 1
 
         return index, nodemap, None
 
-    def packentry(self, entry, node, version):
+    def packentry(self, entry, node, version, rev):
         e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
               node(entry[5]), node(entry[6]), entry[7])
-        return struct.pack(indexformatv0, *e2)
+        return _pack(indexformatv0, *e2)
 
 # index ng:
 # 6 bytes offset
@@ -340,7 +360,7 @@
         except AttributeError:
             size = 0
 
-        if lazyparser.safe_to_use and not inline and size > 1000000:
+        if util.openhardlinks() and not inline and size > 1000000:
             # big index, let's parse it on demand
             parser = lazyparser(fp, size)
             index = lazyindex(parser)
@@ -359,12 +379,11 @@
         # if we're not using lazymap, always read the whole index
         data = fp.read()
         l = len(data) - s
-        unpack = struct.unpack
         append = index.append
         if inline:
             cache = (0, data)
             while off <= l:
-                e = unpack(indexformatng, data[off:off + s])
+                e = _unpack(indexformatng, data[off:off + s])
                 nodemap[e[7]] = n
                 append(e)
                 n += 1
@@ -373,7 +392,7 @@
                 off += e[1] + s
         else:
             while off <= l:
-                e = unpack(indexformatng, data[off:off + s])
+                e = _unpack(indexformatng, data[off:off + s])
                 nodemap[e[7]] = n
                 append(e)
                 n += 1
@@ -386,10 +405,10 @@
 
         return index, nodemap, cache
 
-    def packentry(self, entry, node, version):
-        p = struct.pack(indexformatng, *entry)
-        if not entry[3] and not getoffset(entry[0]) and entry[5] == nullrev:
-            p = struct.pack(versionformat, version) + p[4:]
+    def packentry(self, entry, node, version, rev):
+        p = _pack(indexformatng, *entry)
+        if rev == 0:
+            p = _pack(versionformat, version) + p[4:]
         return p
 
 class revlog(object):
@@ -500,7 +519,7 @@
         try:
             return self.nodemap[node]
         except KeyError:
-            raise LookupError(_('%s: no node %s') % (self.indexfile, hex(node)))
+            raise LookupError(hex(node), _('%s: no node %s') % (self.indexfile, hex(node)))
     def node(self, rev):
         return self.index[rev][7]
     def linkrev(self, node):
@@ -511,7 +530,7 @@
     def parentrevs(self, rev):
         return self.index[rev][5:7]
     def start(self, rev):
-        return getoffset(self.index[rev][0])
+        return int(self.index[rev][0] >> 16)
     def end(self, rev):
         return self.start(rev) + self.length(rev)
     def length(self, rev):
@@ -820,7 +839,8 @@
                 for n in self.nodemap:
                     if n.startswith(bin_id) and hex(n).startswith(id):
                         if node is not None:
-                            raise LookupError(_("Ambiguous identifier"))
+                            raise LookupError(hex(node),
+                                              _("Ambiguous identifier"))
                         node = n
                 if node is not None:
                     return node
@@ -839,7 +859,7 @@
         if n:
             return n
 
-        raise LookupError(_("No match found"))
+        raise LookupError(id, _("No match found"))
 
     def cmp(self, node, text):
         """compare text with a given file revision"""
@@ -847,12 +867,7 @@
         return hash(text, p1, p2) != node
 
     def chunk(self, rev, df=None):
-        start, length = self.start(rev), self.length(rev)
-        if self._inline:
-            start += (rev + 1) * self._io.size
-        end = start + length
         def loadcache(df):
-            cache_length = max(65536, length)
             if not df:
                 if self._inline:
                     df = self.opener(self.indexfile)
@@ -861,21 +876,29 @@
             df.seek(start)
             self._chunkcache = (start, df.read(cache_length))
 
-        if not self._chunkcache:
-            loadcache(df)
+        start, length = self.start(rev), self.length(rev)
+        if self._inline:
+            start += (rev + 1) * self._io.size
+        end = start + length
 
-        cache_start = self._chunkcache[0]
-        cache_end = cache_start + len(self._chunkcache[1])
-        if start >= cache_start and end <= cache_end:
-            # it is cached
-            offset = start - cache_start
+        offset = 0
+        if not self._chunkcache:
+            cache_length = max(65536, length)
+            loadcache(df)
         else:
-            loadcache(df)
-            offset = 0
+            cache_start = self._chunkcache[0]
+            cache_length = len(self._chunkcache[1])
+            cache_end = cache_start + cache_length
+            if start >= cache_start and end <= cache_end:
+                # it is cached
+                offset = start - cache_start
+            else:
+                cache_length = max(65536, length)
+                loadcache(df)
 
         # avoid copying large chunks
         c = self._chunkcache[1]
-        if len(c) > length:
+        if cache_length != length:
             c = c[offset:offset + length]
 
         return decompress(c)
@@ -887,26 +910,29 @@
 
     def revdiff(self, rev1, rev2):
         """return or calculate a delta between two revisions"""
-        b1 = self.base(rev1)
-        b2 = self.base(rev2)
-        if b1 == b2 and rev1 + 1 == rev2:
+        if rev1 + 1 == rev2 and self.base(rev1) == self.base(rev2):
             return self.chunk(rev2)
-        else:
-            return mdiff.textdiff(self.revision(self.node(rev1)),
-                                  self.revision(self.node(rev2)))
+
+        return mdiff.textdiff(self.revision(self.node(rev1)),
+                              self.revision(self.node(rev2)))
 
     def revision(self, node):
         """return an uncompressed revision of a given node"""
         if node == nullid:
             return ""
         if self._cache and self._cache[0] == node:
-            return self._cache[2]
+            return str(self._cache[2])
 
         # look up what we need to read
         text = None
         rev = self.rev(node)
         base = self.base(rev)
 
+        # check rev flags
+        if self.index[rev][0] & 0xFFFF:
+            raise RevlogError(_('incompatible revision flag %x') %
+                              (self.index[rev][0] & 0xFFFF))
+
         if self._inline:
             # we probably have the whole chunk cached
             df = None
@@ -916,7 +942,7 @@
         # do we have useful data cached?
         if self._cache and self._cache[1] >= base and self._cache[1] < rev:
             base = self._cache[1]
-            text = self._cache[2]
+            text = str(self._cache[2])
             self._loadindex(base, rev + 1)
         else:
             self._loadindex(base, rev + 1)
@@ -964,7 +990,7 @@
         self.version &= ~(REVLOGNGINLINEDATA)
         self._inline = False
         for i in xrange(self.count()):
-            e = self._io.packentry(self.index[i], self.node, self.version)
+            e = self._io.packentry(self.index[i], self.node, self.version, i)
             fp.write(e)
 
         # if we don't call rename, the temp file will never replace the
@@ -1019,7 +1045,7 @@
         self.index.insert(-1, e)
         self.nodemap[node] = curr
 
-        entry = self._io.packentry(e, self.node, self.version)
+        entry = self._io.packentry(e, self.node, self.version, curr)
         if not self._inline:
             transaction.add(self.datafile, offset)
             transaction.add(self.indexfile, curr * len(entry))
@@ -1030,7 +1056,7 @@
             ifh.write(entry)
         else:
             offset += curr * self._io.size
-            transaction.add(self.indexfile, offset, prev)
+            transaction.add(self.indexfile, offset, curr)
             ifh.write(entry)
             ifh.write(data[0])
             ifh.write(data[1])
@@ -1079,10 +1105,23 @@
             if infocollect is not None:
                 infocollect(nb)
 
-            d = self.revdiff(a, b)
             p = self.parents(nb)
             meta = nb + p[0] + p[1] + lookup(nb)
-            yield changegroup.genchunk("%s%s" % (meta, d))
+            if a == -1:
+                d = self.revision(nb)
+                meta += mdiff.trivialdiffheader(len(d))
+            else:
+                d = self.revdiff(a, b)
+            yield changegroup.chunkheader(len(meta) + len(d))
+            yield meta
+            if len(d) > 2**20:
+                pos = 0
+                while pos < len(d):
+                    pos2 = pos + 2 ** 18
+                    yield d[pos:pos2]
+                    pos = pos2
+            else:
+                yield d
 
         yield changegroup.closechunk()
 
@@ -1126,17 +1165,18 @@
                 #    raise RevlogError(_("already have %s") % hex(node[:4]))
                 chain = node
                 continue
-            delta = chunk[80:]
+            delta = buffer(chunk, 80)
+            del chunk
 
             for p in (p1, p2):
                 if not p in self.nodemap:
-                    raise LookupError(_("unknown parent %s") % short(p))
+                    raise LookupError(hex(p), _("unknown parent %s") % short(p))
 
             if not chain:
                 # retrieve the parent revision of the delta chain
                 chain = p1
                 if not chain in self.nodemap:
-                    raise LookupError(_("unknown base %s") % short(chain[:4]))
+                    raise LookupError(hex(chain), _("unknown base %s") % short(chain[:4]))
 
             # full versions are inserted when the needed deltas become
             # comparable to the uncompressed text or when the previous
@@ -1145,17 +1185,22 @@
             # current size.
 
             if chain == prev:
-                tempd = compress(delta)
-                cdelta = tempd[0] + tempd[1]
+                cdelta = compress(delta)
+                cdeltalen = len(cdelta[0]) + len(cdelta[1])
                 textlen = mdiff.patchedsize(textlen, delta)
 
-            if chain != prev or (end - start + len(cdelta)) > textlen * 2:
+            if chain != prev or (end - start + cdeltalen) > textlen * 2:
                 # flush our writes here so we can read it in revision
                 if dfh:
                     dfh.flush()
                 ifh.flush()
                 text = self.revision(chain)
-                text = mdiff.patches(text, [delta])
+                if len(text) == 0:
+                    # skip over trivial delta header
+                    text = buffer(delta, 12)
+                else:
+                    text = mdiff.patches(text, [delta])
+                del delta
                 chk = self._addrevision(text, transaction, link, p1, p2, None,
                                         ifh, dfh)
                 if not dfh and not self._inline:
@@ -1167,20 +1212,22 @@
                     raise RevlogError(_("consistency error adding group"))
                 textlen = len(text)
             else:
-                e = (offset_type(end, 0), len(cdelta), textlen, base,
+                e = (offset_type(end, 0), cdeltalen, textlen, base,
                      link, self.rev(p1), self.rev(p2), node)
                 self.index.insert(-1, e)
                 self.nodemap[node] = r
-                entry = self._io.packentry(e, self.node, self.version)
+                entry = self._io.packentry(e, self.node, self.version, r)
                 if self._inline:
                     ifh.write(entry)
-                    ifh.write(cdelta)
+                    ifh.write(cdelta[0])
+                    ifh.write(cdelta[1])
                     self.checkinlinesize(transaction, ifh)
                     if not self._inline:
                         dfh = self.opener(self.datafile, "a")
                         ifh = self.opener(self.indexfile, "a")
                 else:
-                    dfh.write(cdelta)
+                    dfh.write(cdelta[0])
+                    dfh.write(cdelta[1])
                     ifh.write(entry)
 
             t, r, chain, prev = r, r + 1, node, node
@@ -1190,21 +1237,31 @@
 
         return node
 
-    def strip(self, rev, minlink):
-        if self.count() == 0 or rev >= self.count():
+    def strip(self, minlink):
+        """truncate the revlog on the first revision with a linkrev >= minlink
+
+        This function is called when we're stripping revision minlink and
+        its descendants from the repository.
+
+        We have to remove all revisions with linkrev >= minlink, because
+        the equivalent changelog revisions will be renumbered after the
+        strip.
+
+        So we truncate the revlog on the first of these revisions, and
+        trust that the caller has saved the revisions that shouldn't be
+        removed and that it'll re-add them after this truncation.
+        """
+        if self.count() == 0:
             return
 
         if isinstance(self.index, lazyindex):
             self._loadindexmap()
 
-        # When stripping away a revision, we need to make sure it
-        # does not actually belong to an older changeset.
-        # The minlink parameter defines the oldest revision
-        # we're allowed to strip away.
-        while minlink > self.index[rev][4]:
-            rev += 1
-            if rev >= self.count():
-                return
+        for rev in xrange(0, self.count()):
+            if self.index[rev][4] >= minlink:
+                break
+        else:
+            return
 
         # first truncate the files on disk
         end = self.start(rev)
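
As the strip() docstring above explains, the revlog is truncated at the first revision whose linkrev is >= minlink; later revisions with a smaller linkrev are the ones _collectextranodes() in repair.py saves beforehand. A standalone sketch of that truncation rule (illustrative only):

# Illustrative sketch: find the truncation point described in strip() above,
# given the list of linkrevs of a revlog.
def first_rev_to_strip(linkrevs, minlink):
    for rev, lr in enumerate(linkrevs):
        if lr >= minlink:
            return rev
    return None  # nothing to strip

# rev 2 is the first with linkrev >= 3, so revs 2..4 are truncated; rev 3
# (linkrev 2) is exactly the kind of revision _collectextranodes() preserves.
print(first_rev_to_strip([0, 1, 3, 2, 5], 3))  # 2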
@@ -1229,7 +1286,7 @@
     def checksize(self):
         expected = 0
         if self.count():
-            expected = self.end(self.count() - 1)
+            expected = max(0, self.end(self.count() - 1))
 
         try:
             f = self.opener(self.datafile)
@@ -1246,12 +1303,12 @@
             f.seek(0, 2)
             actual = f.tell()
             s = self._io.size
-            i = actual / s
+            i = max(0, actual / s)
             di = actual - (i * s)
             if self._inline:
                 databytes = 0
                 for r in xrange(self.count()):
-                    databytes += self.length(r)
+                    databytes += max(0, self.length(r))
                 dd = 0
                 di = actual - self.count() * s - databytes
         except IOError, inst:
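
Among the revlog changes above, compress() now feeds texts larger than about one megabyte through zlib.compressobj() in 1MB slices, so zlib never has to copy the whole input at once, and it still stores the text uncompressed when compression does not shrink it. A standalone sketch of that strategy (illustrative only; the real compress() additionally returns the ("", text) and ('u', text) store forms):

# Illustrative sketch: chunked zlib compression as done by the new
# compress() above; returns None when compression would not save space.
import zlib

def compress_chunked(text, piece=2 ** 20):
    z = zlib.compressobj()
    parts = []
    pos = 0
    while pos < len(text):
        parts.append(z.compress(text[pos:pos + piece]))
        pos += piece
    parts.append(z.flush())
    if sum(len(p) for p in parts) < len(text):
        return b"".join(parts)
    return None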
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/simplemerge.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,456 @@
+#!/usr/bin/env python
+# Copyright (C) 2004, 2005 Canonical Ltd
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+# mbp: "you know that thing where cvs gives you conflict markers?"
+# s: "i hate that."
+
+from i18n import _
+import util, mdiff, fancyopts, sys, os
+
+class CantReprocessAndShowBase(Exception):
+    pass
+
+def warn(message):
+    sys.stdout.flush()
+    sys.stderr.write(message)
+    sys.stderr.flush()
+
+def intersect(ra, rb):
+    """Given two ranges return the range where they intersect or None.
+
+    >>> intersect((0, 10), (0, 6))
+    (0, 6)
+    >>> intersect((0, 10), (5, 15))
+    (5, 10)
+    >>> intersect((0, 10), (10, 15))
+    >>> intersect((0, 9), (10, 15))
+    >>> intersect((0, 9), (7, 15))
+    (7, 9)
+    """
+    assert ra[0] <= ra[1]
+    assert rb[0] <= rb[1]
+
+    sa = max(ra[0], rb[0])
+    sb = min(ra[1], rb[1])
+    if sa < sb:
+        return sa, sb
+    else:
+        return None
+
+def compare_range(a, astart, aend, b, bstart, bend):
+    """Compare a[astart:aend] == b[bstart:bend], without slicing.
+    """
+    if (aend-astart) != (bend-bstart):
+        return False
+    for ia, ib in zip(xrange(astart, aend), xrange(bstart, bend)):
+        if a[ia] != b[ib]:
+            return False
+    else:
+        return True
+
+class Merge3Text(object):
+    """3-way merge of texts.
+
+    Given strings BASE, OTHER, THIS, tries to produce a combined text
+    incorporating the changes from both BASE->OTHER and BASE->THIS."""
+    def __init__(self, basetext, atext, btext, base=None, a=None, b=None):
+        self.basetext = basetext
+        self.atext = atext
+        self.btext = btext
+        if base is None:
+            base = mdiff.splitnewlines(basetext)
+        if a is None:
+            a = mdiff.splitnewlines(atext)
+        if b is None:
+            b = mdiff.splitnewlines(btext)
+        self.base = base
+        self.a = a
+        self.b = b
+
+    def merge_lines(self,
+                    name_a=None,
+                    name_b=None,
+                    name_base=None,
+                    start_marker='<<<<<<<',
+                    mid_marker='=======',
+                    end_marker='>>>>>>>',
+                    base_marker=None,
+                    reprocess=False):
+        """Return merge in cvs-like form.
+        """
+        self.conflicts = False
+        newline = '\n'
+        if len(self.a) > 0:
+            if self.a[0].endswith('\r\n'):
+                newline = '\r\n'
+            elif self.a[0].endswith('\r'):
+                newline = '\r'
+        if base_marker and reprocess:
+            raise CantReprocessAndShowBase()
+        if name_a:
+            start_marker = start_marker + ' ' + name_a
+        if name_b:
+            end_marker = end_marker + ' ' + name_b
+        if name_base and base_marker:
+            base_marker = base_marker + ' ' + name_base
+        merge_regions = self.merge_regions()
+        if reprocess is True:
+            merge_regions = self.reprocess_merge_regions(merge_regions)
+        for t in merge_regions:
+            what = t[0]
+            if what == 'unchanged':
+                for i in range(t[1], t[2]):
+                    yield self.base[i]
+            elif what == 'a' or what == 'same':
+                for i in range(t[1], t[2]):
+                    yield self.a[i]
+            elif what == 'b':
+                for i in range(t[1], t[2]):
+                    yield self.b[i]
+            elif what == 'conflict':
+                self.conflicts = True
+                yield start_marker + newline
+                for i in range(t[3], t[4]):
+                    yield self.a[i]
+                if base_marker is not None:
+                    yield base_marker + newline
+                    for i in range(t[1], t[2]):
+                        yield self.base[i]
+                yield mid_marker + newline
+                for i in range(t[5], t[6]):
+                    yield self.b[i]
+                yield end_marker + newline
+            else:
+                raise ValueError(what)
+
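
merge_lines() above walks the merge regions and emits CVS-style conflict markers, setting self.conflicts when a conflict region is produced. A minimal usage sketch (illustrative only, not part of the new file):

# Illustrative sketch: driving Merge3Text.merge_lines() on three tiny texts.
from mercurial import simplemerge

base  = "a\nb\nc\n"
local = "a\nb changed locally\nc\n"
other = "a\nb changed remotely\nc\n"

m3 = simplemerge.Merge3Text(base, local, other)
merged = ''.join(m3.merge_lines(name_a='local', name_b='other'))
print(merged)                      # contains <<<<<<< local ... >>>>>>> other
print('conflicts:', m3.conflicts)  # True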
+    def merge_annotated(self):
+        """Return merge with conflicts, showing origin of lines.
+
+        Most useful for debugging merge.
+        """
+        for t in self.merge_regions():
+            what = t[0]
+            if what == 'unchanged':
+                for i in range(t[1], t[2]):
+                    yield 'u | ' + self.base[i]
+            elif what == 'a' or what == 'same':
+                for i in range(t[1], t[2]):
+                    yield what[0] + ' | ' + self.a[i]
+            elif what == 'b':
+                for i in range(t[1], t[2]):
+                    yield 'b | ' + self.b[i]
+            elif what == 'conflict':
+                yield '<<<<\n'
+                for i in range(t[3], t[4]):
+                    yield 'A | ' + self.a[i]
+                yield '----\n'
+                for i in range(t[5], t[6]):
+                    yield 'B | ' + self.b[i]
+                yield '>>>>\n'
+            else:
+                raise ValueError(what)
+
+    def merge_groups(self):
+        """Yield sequence of line groups.  Each one is a tuple:
+
+        'unchanged', lines
+             Lines unchanged from base
+
+        'a', lines
+             Lines taken from a
+
+        'same', lines
+             Lines taken from a (and equal to b)
+
+        'b', lines
+             Lines taken from b
+
+        'conflict', base_lines, a_lines, b_lines
+             Lines from base were changed to either a or b and conflict.
+        """
+        for t in self.merge_regions():
+            what = t[0]
+            if what == 'unchanged':
+                yield what, self.base[t[1]:t[2]]
+            elif what == 'a' or what == 'same':
+                yield what, self.a[t[1]:t[2]]
+            elif what == 'b':
+                yield what, self.b[t[1]:t[2]]
+            elif what == 'conflict':
+                yield (what,
+                       self.base[t[1]:t[2]],
+                       self.a[t[3]:t[4]],
+                       self.b[t[5]:t[6]])
+            else:
+                raise ValueError(what)
+
+    def merge_regions(self):
+        """Return sequences of matching and conflicting regions.
+
+        This returns tuples, where the first value says what kind we
+        have:
+
+        'unchanged', start, end
+             Take a region of base[start:end]
+
+        'same', astart, aend
+             b and a are different from base but give the same result
+
+        'a', start, end
+             Non-clashing insertion from a[start:end]
+
+        'b', start, end
+             Non-clashing insertion from b[start:end]
+
+        'conflict', zstart, zend, astart, aend, bstart, bend
+             Lines from base were changed to either a or b and conflict.
+
+        Method is as follows:
+
+        The two sequences align only on regions which match the base
+        and both descendants.  These are found by doing a two-way diff
+        of each one against the base, and then finding the
+        intersections between those regions.  These "sync regions"
+        are by definition unchanged in both and easily dealt with.
+
+        The regions in between can be in any of three cases:
+        conflicted, changed only in a, or changed only in b.
+        """
+
+        # section a[0:ia] has been disposed of, etc
+        iz = ia = ib = 0
+
+        for zmatch, zend, amatch, aend, bmatch, bend in self.find_sync_regions():
+            #print 'match base [%d:%d]' % (zmatch, zend)
+
+            matchlen = zend - zmatch
+            assert matchlen >= 0
+            assert matchlen == (aend - amatch)
+            assert matchlen == (bend - bmatch)
+
+            len_a = amatch - ia
+            len_b = bmatch - ib
+            len_base = zmatch - iz
+            assert len_a >= 0
+            assert len_b >= 0
+            assert len_base >= 0
+
+            #print 'unmatched a=%d, b=%d' % (len_a, len_b)
+
+            if len_a or len_b:
+                # try to avoid actually slicing the lists
+                equal_a = compare_range(self.a, ia, amatch,
+                                        self.base, iz, zmatch)
+                equal_b = compare_range(self.b, ib, bmatch,
+                                        self.base, iz, zmatch)
+                same = compare_range(self.a, ia, amatch,
+                                     self.b, ib, bmatch)
+
+                if same:
+                    yield 'same', ia, amatch
+                elif equal_a and not equal_b:
+                    yield 'b', ib, bmatch
+                elif equal_b and not equal_a:
+                    yield 'a', ia, amatch
+                elif not equal_a and not equal_b:
+                    yield 'conflict', iz, zmatch, ia, amatch, ib, bmatch
+                else:
+                    raise AssertionError("can't handle a=b=base but unmatched")
+
+                ia = amatch
+                ib = bmatch
+            iz = zmatch
+
+            # if the same part of the base was deleted on both sides
+            # that's OK, we can just skip it.
+
+
+            if matchlen > 0:
+                assert ia == amatch
+                assert ib == bmatch
+                assert iz == zmatch
+
+                yield 'unchanged', zmatch, zend
+                iz = zend
+                ia = aend
+                ib = bend
+
+    def reprocess_merge_regions(self, merge_regions):
+        """Where there are conflict regions, remove the agreed lines.
+
+        Lines where both A and B have made the same changes are
+        eliminated.
+        """
+        for region in merge_regions:
+            if region[0] != "conflict":
+                yield region
+                continue
+            type, iz, zmatch, ia, amatch, ib, bmatch = region
+            a_region = self.a[ia:amatch]
+            b_region = self.b[ib:bmatch]
+            matches = mdiff.get_matching_blocks(''.join(a_region),
+                                                ''.join(b_region))
+            next_a = ia
+            next_b = ib
+            for region_ia, region_ib, region_len in matches[:-1]:
+                region_ia += ia
+                region_ib += ib
+                reg = self.mismatch_region(next_a, region_ia, next_b,
+                                           region_ib)
+                if reg is not None:
+                    yield reg
+                yield 'same', region_ia, region_len+region_ia
+                next_a = region_ia + region_len
+                next_b = region_ib + region_len
+            reg = self.mismatch_region(next_a, amatch, next_b, bmatch)
+            if reg is not None:
+                yield reg
+
+    def mismatch_region(next_a, region_ia,  next_b, region_ib):
+        if next_a < region_ia or next_b < region_ib:
+            return 'conflict', None, None, next_a, region_ia, next_b, region_ib
+    mismatch_region = staticmethod(mismatch_region)
+
+    def find_sync_regions(self):
+        """Return a list of sync regions, where both descendents match the base.
+
+        Generates a list of (base1, base2, a1, a2, b1, b2).  There is
+        always a zero-length sync region at the end of all the files.
+        """
+
+        ia = ib = 0
+        amatches = mdiff.get_matching_blocks(self.basetext, self.atext)
+        bmatches = mdiff.get_matching_blocks(self.basetext, self.btext)
+        len_a = len(amatches)
+        len_b = len(bmatches)
+
+        sl = []
+
+        while ia < len_a and ib < len_b:
+            abase, amatch, alen = amatches[ia]
+            bbase, bmatch, blen = bmatches[ib]
+
+            # there is an unconflicted block at i; how long does it
+            # extend?  until whichever one ends earlier.
+            i = intersect((abase, abase+alen), (bbase, bbase+blen))
+            if i:
+                intbase = i[0]
+                intend = i[1]
+                intlen = intend - intbase
+
+                # found a match of base[i[0], i[1]]; this may be less than
+                # the region that matches in either one
+                assert intlen <= alen
+                assert intlen <= blen
+                assert abase <= intbase
+                assert bbase <= intbase
+
+                asub = amatch + (intbase - abase)
+                bsub = bmatch + (intbase - bbase)
+                aend = asub + intlen
+                bend = bsub + intlen
+
+                assert self.base[intbase:intend] == self.a[asub:aend], \
+                       (self.base[intbase:intend], self.a[asub:aend])
+
+                assert self.base[intbase:intend] == self.b[bsub:bend]
+
+                sl.append((intbase, intend,
+                           asub, aend,
+                           bsub, bend))
+
+            # advance whichever one ends first in the base text
+            if (abase + alen) < (bbase + blen):
+                ia += 1
+            else:
+                ib += 1
+
+        intbase = len(self.base)
+        abase = len(self.a)
+        bbase = len(self.b)
+        sl.append((intbase, intbase, abase, abase, bbase, bbase))
+
+        return sl
+
+    def find_unconflicted(self):
+        """Return a list of ranges in base that are not conflicted."""
+        am = mdiff.get_matching_blocks(self.basetext, self.atext)
+        bm = mdiff.get_matching_blocks(self.basetext, self.btext)
+
+        unc = []
+
+        while am and bm:
+            # there is an unconflicted block at i; how long does it
+            # extend?  until whichever one ends earlier.
+            a1 = am[0][0]
+            a2 = a1 + am[0][2]
+            b1 = bm[0][0]
+            b2 = b1 + bm[0][2]
+            i = intersect((a1, a2), (b1, b2))
+            if i:
+                unc.append(i)
+
+            if a2 < b2:
+                del am[0]
+            else:
+                del bm[0]
+
+        return unc
+
+def simplemerge(local, base, other, **opts):
+    def readfile(filename):
+        f = open(filename, "rb")
+        text = f.read()
+        f.close()
+        if util.binary(text):
+            msg = _("%s looks like a binary file.") % filename
+            if not opts.get('text'):
+                raise util.Abort(msg)
+            elif not opts.get('quiet'):
+                warn(_('warning: %s\n') % msg)
+        return text
+
+    name_a = local
+    name_b = other
+    labels = opts.get('label', [])
+    if labels:
+        name_a = labels.pop(0)
+    if labels:
+        name_b = labels.pop(0)
+    if labels:
+        raise util.Abort(_("can only specify two labels."))
+
+    localtext = readfile(local)
+    basetext = readfile(base)
+    othertext = readfile(other)
+
+    orig = local
+    local = os.path.realpath(local)
+    if not opts.get('print'):
+        opener = util.opener(os.path.dirname(local))
+        out = opener(os.path.basename(local), "w", atomictemp=True)
+    else:
+        out = sys.stdout
+
+    reprocess = not opts.get('no_minimal')
+
+    m3 = Merge3Text(basetext, localtext, othertext)
+    for line in m3.merge_lines(name_a=name_a, name_b=name_b,
+                               reprocess=reprocess):
+        out.write(line)
+
+    if not opts.get('print'):
+        out.rename()
+
+    if m3.conflicts:
+        if not opts.get('quiet'):
+            warn(_("warning: conflicts during merge.\n"))
+        return 1
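
The find_sync_regions() docstring above describes the core of the three-way merge: two-way diff each descendant against the base, then intersect the matching regions. Below is a rough standalone sketch of that intersection step, substituting the stdlib difflib.SequenceMatcher for mdiff.get_matching_blocks; the names and sample inputs are illustrative, not Mercurial code.

import difflib

def intersect(ra, rb):
    """Overlap of two [start, end) ranges, or None if they do not overlap."""
    start = max(ra[0], rb[0])
    end = min(ra[1], rb[1])
    if start < end:
        return start, end
    return None

def sync_regions(base, a, b):
    """Regions (basestart, baseend, astart, aend, bstart, bend) where
    base, a and b all agree."""
    amatches = difflib.SequenceMatcher(None, base, a).get_matching_blocks()
    bmatches = difflib.SequenceMatcher(None, base, b).get_matching_blocks()
    ia = ib = 0
    regions = []
    while ia < len(amatches) and ib < len(bmatches):
        abase, amatch, alen = amatches[ia]
        bbase, bmatch, blen = bmatches[ib]
        i = intersect((abase, abase + alen), (bbase, bbase + blen))
        if i:
            intbase, intend = i
            intlen = intend - intbase
            asub = amatch + (intbase - abase)
            bsub = bmatch + (intbase - bbase)
            regions.append((intbase, intend, asub, asub + intlen, bsub, bsub + intlen))
        # advance whichever match ends first in the base
        if abase + alen < bbase + blen:
            ia += 1
        else:
            ib += 1
    return regions

base = ["a\n", "b\n", "c\n"]
a = ["a\n", "B\n", "c\n"]      # a changed the second line
b = ["a\n", "b\n", "C\n"]      # b changed the third line
print(sync_regions(base, a, b))   # [(0, 1, 0, 1, 0, 1)]
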
--- a/mercurial/sshrepo.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/sshrepo.py	Wed Feb 06 19:57:52 2008 -0800
@@ -8,7 +8,7 @@
 from node import *
 from remoterepo import *
 from i18n import _
-import hg, os, re, stat, util
+import repo, os, re, stat, util
 
 class sshrepository(remoterepository):
     def __init__(self, ui, path, create=0):
@@ -17,27 +17,26 @@
 
         m = re.match(r'^ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?$', path)
         if not m:
-            self.raise_(hg.RepoError(_("couldn't parse location %s") % path))
+            self.raise_(repo.RepoError(_("couldn't parse location %s") % path))
 
         self.user = m.group(2)
         self.host = m.group(3)
         self.port = m.group(5)
         self.path = m.group(7) or "."
 
-        args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
-        args = self.port and ("%s -p %s") % (args, self.port) or args
-
         sshcmd = self.ui.config("ui", "ssh", "ssh")
         remotecmd = self.ui.config("ui", "remotecmd", "hg")
 
+        args = util.sshargs(sshcmd, self.host, self.user, self.port)
+
         if create:
             cmd = '%s %s "%s init %s"'
             cmd = cmd % (sshcmd, args, remotecmd, self.path)
 
             ui.note('running %s\n' % cmd)
-            res = os.system(cmd)
+            res = util.system(cmd)
             if res != 0:
-                self.raise_(hg.RepoError(_("could not create remote repo")))
+                self.raise_(repo.RepoError(_("could not create remote repo")))
 
         self.validate_repo(ui, sshcmd, args, remotecmd)
 
@@ -51,6 +50,7 @@
         cmd = '%s %s "%s -R %s serve --stdio"'
         cmd = cmd % (sshcmd, args, remotecmd, self.path)
 
+        cmd = util.quotecommand(cmd)
         ui.note('running %s\n' % cmd)
         self.pipeo, self.pipei, self.pipee = os.popen3(cmd, 'b')
 
@@ -69,13 +69,13 @@
             lines.append(l)
             max_noise -= 1
         else:
-            self.raise_(hg.RepoError(_("no suitable response from remote hg")))
+            self.raise_(repo.RepoError(_("no suitable response from remote hg")))
 
-        self.capabilities = ()
+        self.capabilities = util.set()
         lines.reverse()
         for l in lines:
             if l.startswith("capabilities:"):
-                self.capabilities = l[:-1].split(":")[1].split()
+                self.capabilities.update(l[:-1].split(":")[1].split())
                 break
 
     def readerr(self):
@@ -114,14 +114,25 @@
         return self.pipei
 
     def call(self, cmd, **args):
-        r = self.do_cmd(cmd, **args)
-        l = r.readline()
+        self.do_cmd(cmd, **args)
+        return self._recv()
+
+    def _recv(self):
+        l = self.pipei.readline()
         self.readerr()
         try:
             l = int(l)
         except:
             self.raise_(util.UnexpectedOutput(_("unexpected response:"), l))
-        return r.read(l)
+        return self.pipei.read(l)
+
+    def _send(self, data, flush=False):
+        self.pipeo.write("%d\n" % len(data))
+        if data:
+            self.pipeo.write(data)
+        if flush:
+            self.pipeo.flush()
+        self.readerr()
 
     def lock(self):
         self.call("lock")
@@ -131,12 +142,13 @@
         self.call("unlock")
 
     def lookup(self, key):
+        self.requirecap('lookup', _('look up remote revision'))
         d = self.call("lookup", key=key)
         success, data = d[:-1].split(" ", 1)
         if int(success):
             return bin(data)
         else:
-            self.raise_(hg.RepoError(data))
+            self.raise_(repo.RepoError(data))
 
     def heads(self):
         d = self.call("heads")
@@ -168,6 +180,7 @@
         return self.do_cmd("changegroup", roots=n)
 
     def changegroupsubset(self, bases, heads, kind):
+        self.requirecap('changegroupsubset', _('look up remote changes'))
         bases = " ".join(map(hex, bases))
         heads = " ".join(map(hex, heads))
         return self.do_cmd("changegroupsubset", bases=bases, heads=heads)
@@ -175,47 +188,49 @@
     def unbundle(self, cg, heads, source):
         d = self.call("unbundle", heads=' '.join(map(hex, heads)))
         if d:
-            self.raise_(hg.RepoError(_("push refused: %s") % d))
+            # remote may send "unsynced changes"
+            self.raise_(repo.RepoError(_("push refused: %s") % d))
 
         while 1:
             d = cg.read(4096)
-            if not d: break
-            self.pipeo.write(str(len(d)) + '\n')
-            self.pipeo.write(d)
-            self.readerr()
+            if not d:
+                break
+            self._send(d)
 
-        self.pipeo.write('0\n')
-        self.pipeo.flush()
+        self._send("", flush=True)
 
-        self.readerr()
-        d = self.pipei.readline()
-        if d != '\n':
-            return 1
+        r = self._recv()
+        if r:
+            # remote may send "unsynced changes"
+            self.raise_(repo.RepoError(_("push failed: %s") % r))
 
-        l = int(self.pipei.readline())
-        r = self.pipei.read(l)
-        if not r:
-            return 1
-        return int(r)
+        r = self._recv()
+        try:
+            return int(r)
+        except:
+            self.raise_(util.UnexpectedOutput(_("unexpected response:"), r))
 
     def addchangegroup(self, cg, source, url):
         d = self.call("addchangegroup")
         if d:
-            self.raise_(hg.RepoError(_("push refused: %s") % d))
+            self.raise_(repo.RepoError(_("push refused: %s") % d))
         while 1:
             d = cg.read(4096)
-            if not d: break
+            if not d:
+                break
             self.pipeo.write(d)
             self.readerr()
 
         self.pipeo.flush()
 
         self.readerr()
-        l = int(self.pipei.readline())
-        r = self.pipei.read(l)
+        r = self._recv()
         if not r:
             return 1
-        return int(r)
+        try:
+            return int(r)
+        except:
+            self.raise_(util.UnexpectedOutput(_("unexpected response:"), r))
 
     def stream_out(self):
         return self.do_cmd('stream_out')
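
The new _send()/_recv() helpers above give sshrepo a simple length-prefixed framing: a decimal length on its own line, then the payload, with a zero-length chunk used as a terminator. A minimal sketch of that framing over an in-memory buffer rather than the real ssh pipes (illustrative only):

from io import BytesIO

def send(pipe, data):
    pipe.write(b"%d\n" % len(data))   # length line first
    if data:
        pipe.write(data)              # then the payload

def recv(pipe):
    l = int(pipe.readline())          # read the length line
    return pipe.read(l)               # then exactly that many bytes

buf = BytesIO()
send(buf, b"hello")
send(buf, b"")                        # zero length ends the stream
buf.seek(0)
print(recv(buf))                      # b'hello'
print(recv(buf))                      # b''
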
--- a/mercurial/sshserver.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/sshserver.py	Wed Feb 06 19:57:52 2008 -0800
@@ -8,7 +8,7 @@
 
 from i18n import _
 from node import *
-import os, streamclone, sys, tempfile, util
+import os, streamclone, sys, tempfile, util, hook
 
 class sshserver(object):
     def __init__(self, ui, repo):
@@ -18,6 +18,7 @@
         self.fin = sys.stdin
         self.fout = sys.stdout
 
+        hook.redirect(True)
         sys.stdout = sys.stderr
 
         # Prevent insertion/deletion of CRs
--- a/mercurial/statichttprepo.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/statichttprepo.py	Wed Feb 06 19:57:52 2008 -0800
@@ -33,7 +33,7 @@
         self._url = path
         self.ui = ui
 
-        self.path = (path + "/.hg")
+        self.path = path.rstrip('/') + "/.hg"
         self.opener = opener(self.path)
         # find requirements
         try:
--- a/mercurial/streamclone.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/streamclone.py	Wed Feb 06 19:57:52 2008 -0800
@@ -6,7 +6,7 @@
 # of the GNU General Public License, incorporated herein by reference.
 
 from i18n import _
-import os, stat, util, lock
+import os, osutil, stat, util, lock
 
 # if server supports streaming clone, it advertises "stream"
 # capability with value that is version+flags of repo it is serving.
@@ -19,17 +19,14 @@
 
     strip_count = len(root) + len(os.sep)
     def walk(path, recurse):
-        ents = os.listdir(path)
-        ents.sort()
-        for e in ents:
+        for e, kind, st in osutil.listdir(path, stat=True):
             pe = os.path.join(path, e)
-            st = os.lstat(pe)
-            if stat.S_ISDIR(st.st_mode):
+            if kind == stat.S_IFDIR:
                 if recurse:
                     for x in walk(pe, True):
                         yield x
             else:
-                if not stat.S_ISREG(st.st_mode) or len(e) < 2:
+                if kind != stat.S_IFREG or len(e) < 2:
                     continue
                 sfx = e[-2:]
                 if sfx in ('.d', '.i'):
@@ -66,7 +63,7 @@
 
     # get consistent snapshot of repo. lock during scan so lock not
     # needed while we stream, and commits can happen.
-    lock = None
+    repolock = None
     try:
         try:
             repolock = repo.lock()
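
The stream_out walk above now gets each entry's name, kind and stat from a single osutil.listdir(path, stat=True) call instead of listdir plus lstat per file. A rough stdlib-only analogue of the same traversal using os.scandir, collecting revlog files (*.i, *.d); the function name and starting path are illustrative:

import os

def walk_revlogs(root):
    """Yield (relative path, size) for revlog files (*.i, *.d) under root."""
    strip = len(root) + len(os.sep)
    def walk(path, recurse):
        for entry in sorted(os.scandir(path), key=lambda e: e.name):
            if entry.is_dir(follow_symlinks=False):
                if recurse:
                    for x in walk(entry.path, True):
                        yield x
            elif entry.name[-2:] in ('.d', '.i') and entry.is_file(follow_symlinks=False):
                st = entry.stat(follow_symlinks=False)
                yield entry.path[strip:], st.st_size
    return walk(root, True)

# for name, size in walk_revlogs('/path/to/repo/.hg'): print(name, size)
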
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/templatefilters.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,155 @@
+# templatefilters.py - common template expansion filters
+#
+# Copyright 2005-2008 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import cgi, re, os, time, urllib, textwrap
+import util, templater
+
+agescales = [("second", 1),
+             ("minute", 60),
+             ("hour", 3600),
+             ("day", 3600 * 24),
+             ("week", 3600 * 24 * 7),
+             ("month", 3600 * 24 * 30),
+             ("year", 3600 * 24 * 365)]
+
+agescales.reverse()
+
+def age(date):
+    '''turn a (timestamp, tzoff) tuple into an age string.'''
+
+    def plural(t, c):
+        if c == 1:
+            return t
+        return t + "s"
+    def fmt(t, c):
+        return "%d %s" % (c, plural(t, c))
+
+    now = time.time()
+    then = date[0]
+    delta = max(1, int(now - then))
+
+    for t, s in agescales:
+        n = delta / s
+        if n >= 2 or s == 1:
+            return fmt(t, n)
+
+para_re = None
+space_re = None
+
+def fill(text, width):
+    '''fill many paragraphs.'''
+    global para_re, space_re
+    if para_re is None:
+        para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M)
+        space_re = re.compile(r'  +')
+
+    def findparas():
+        start = 0
+        while True:
+            m = para_re.search(text, start)
+            if not m:
+                w = len(text)
+                while w > start and text[w-1].isspace(): w -= 1
+                yield text[start:w], text[w:]
+                break
+            yield text[start:m.start(0)], m.group(1)
+            start = m.end(1)
+
+    return "".join([space_re.sub(' ', textwrap.fill(para, width)) + rest
+                    for para, rest in findparas()])
+
+def firstline(text):
+    '''return the first line of text'''
+    try:
+        return text.splitlines(1)[0].rstrip('\r\n')
+    except IndexError:
+        return ''
+
+def isodate(date):
+    '''turn a (timestamp, tzoff) tuple into an ISO 8601 date and time.'''
+    return util.datestr(date, format='%Y-%m-%d %H:%M')
+
+def hgdate(date):
+    '''turn a (timestamp, tzoff) tuple into an hg cset timestamp.'''
+    return "%d %d" % date
+
+def nl2br(text):
+    '''replace raw newlines with xhtml line breaks.'''
+    return text.replace('\n', '<br/>\n')
+
+def obfuscate(text):
+    text = unicode(text, util._encoding, 'replace')
+    return ''.join(['&#%d;' % ord(c) for c in text])
+
+def domain(author):
+    '''get domain of author, or empty string if none.'''
+    f = author.find('@')
+    if f == -1: return ''
+    author = author[f+1:]
+    f = author.find('>')
+    if f >= 0: author = author[:f]
+    return author
+
+def person(author):
+    '''get name of author, or else username.'''
+    f = author.find('<')
+    if f == -1: return util.shortuser(author)
+    return author[:f].rstrip()
+
+def shortdate(date):
+    '''turn a (timestamp, tzoff) tuple into an ISO 8601 date.'''
+    return util.datestr(date, format='%Y-%m-%d', timezone=False)
+
+def indent(text, prefix):
+    '''indent each non-empty line of text after first with prefix.'''
+    lines = text.splitlines()
+    num_lines = len(lines)
+    def indenter():
+        for i in xrange(num_lines):
+            l = lines[i]
+            if i and l.strip():
+                yield prefix
+            yield l
+            if i < num_lines - 1 or text.endswith('\n'):
+                yield '\n'
+    return "".join(indenter())
+
+def permissions(flags):
+    if "l" in flags:
+        return "lrwxrwxrwx"
+    if "x" in flags:
+        return "-rwxr-xr-x"
+    return "-rw-r--r--"
+
+filters = {
+    "addbreaks": nl2br,
+    "basename": os.path.basename,
+    "age": age,
+    "date": lambda x: util.datestr(x),
+    "domain": domain,
+    "email": util.email,
+    "escape": lambda x: cgi.escape(x, True),
+    "fill68": lambda x: fill(x, width=68),
+    "fill76": lambda x: fill(x, width=76),
+    "firstline": firstline,
+    "tabindent": lambda x: indent(x, '\t'),
+    "hgdate": hgdate,
+    "isodate": isodate,
+    "obfuscate": obfuscate,
+    "permissions": permissions,
+    "person": person,
+    "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"),
+    "rfc3339date": lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S", True, "%+03d:%02d"),
+    "short": lambda x: x[:12],
+    "shortdate": shortdate,
+    "stringify": templater.stringify,
+    "strip": lambda x: x.strip(),
+    "urlescape": lambda x: urllib.quote(x),
+    "user": lambda x: util.shortuser(x),
+    "stringescape": lambda x: x.encode('string_escape'),
+    }
+
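
The age() filter above scans agescales from largest to smallest and reports the first unit of which the delta contains at least two (falling back to seconds). A standalone illustration of that rule with made-up inputs:

agescales = [("year", 3600 * 24 * 365), ("month", 3600 * 24 * 30),
             ("week", 3600 * 24 * 7), ("day", 3600 * 24),
             ("hour", 3600), ("minute", 60), ("second", 1)]

def age_string(delta_seconds):
    delta = max(1, int(delta_seconds))
    for name, secs in agescales:
        n = delta // secs
        if n >= 2 or secs == 1:
            return "%d %s%s" % (n, name, "" if n == 1 else "s")

print(age_string(90))           # 90 seconds
print(age_string(7200))         # 2 hours
print(age_string(40 * 86400))   # 5 weeks
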
--- a/mercurial/templater.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/templater.py	Wed Feb 06 19:57:52 2008 -0800
@@ -6,8 +6,7 @@
 # of the GNU General Public License, incorporated herein by reference.
 
 from i18n import _
-from node import *
-import cgi, re, sys, os, time, urllib, util, textwrap
+import re, sys, os
 
 def parsestring(s, quoted=True):
     '''parse a string using simple c-like syntax.
@@ -82,7 +81,7 @@
         '''perform expansion.
         t is name of map element to expand.
         map is added elements to use during expansion.'''
-        if not self.cache.has_key(t):
+        if not t in self.cache:
             try:
                 self.cache[t] = file(self.map[t]).read()
             except IOError, inst:
@@ -123,162 +122,6 @@
                         v = self.filters[f](v)
                 yield v
 
-agescales = [("second", 1),
-             ("minute", 60),
-             ("hour", 3600),
-             ("day", 3600 * 24),
-             ("week", 3600 * 24 * 7),
-             ("month", 3600 * 24 * 30),
-             ("year", 3600 * 24 * 365)]
-
-agescales.reverse()
-
-def age(date):
-    '''turn a (timestamp, tzoff) tuple into an age string.'''
-
-    def plural(t, c):
-        if c == 1:
-            return t
-        return t + "s"
-    def fmt(t, c):
-        return "%d %s" % (c, plural(t, c))
-
-    now = time.time()
-    then = date[0]
-    delta = max(1, int(now - then))
-
-    for t, s in agescales:
-        n = delta / s
-        if n >= 2 or s == 1:
-            return fmt(t, n)
-
-def stringify(thing):
-    '''turn nested template iterator into string.'''
-    if hasattr(thing, '__iter__'):
-        return "".join([stringify(t) for t in thing if t is not None])
-    return str(thing)
-
-para_re = None
-space_re = None
-
-def fill(text, width):
-    '''fill many paragraphs.'''
-    global para_re, space_re
-    if para_re is None:
-        para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M)
-        space_re = re.compile(r'  +')
-
-    def findparas():
-        start = 0
-        while True:
-            m = para_re.search(text, start)
-            if not m:
-                w = len(text)
-                while w > start and text[w-1].isspace(): w -= 1
-                yield text[start:w], text[w:]
-                break
-            yield text[start:m.start(0)], m.group(1)
-            start = m.end(1)
-
-    return "".join([space_re.sub(' ', textwrap.fill(para, width)) + rest
-                    for para, rest in findparas()])
-
-def firstline(text):
-    '''return the first line of text'''
-    try:
-        return text.splitlines(1)[0].rstrip('\r\n')
-    except IndexError:
-        return ''
-
-def isodate(date):
-    '''turn a (timestamp, tzoff) tuple into an iso 8631 date and time.'''
-    return util.datestr(date, format='%Y-%m-%d %H:%M')
-
-def hgdate(date):
-    '''turn a (timestamp, tzoff) tuple into an hg cset timestamp.'''
-    return "%d %d" % date
-
-def nl2br(text):
-    '''replace raw newlines with xhtml line breaks.'''
-    return text.replace('\n', '<br/>\n')
-
-def obfuscate(text):
-    text = unicode(text, util._encoding, 'replace')
-    return ''.join(['&#%d;' % ord(c) for c in text])
-
-def domain(author):
-    '''get domain of author, or empty string if none.'''
-    f = author.find('@')
-    if f == -1: return ''
-    author = author[f+1:]
-    f = author.find('>')
-    if f >= 0: author = author[:f]
-    return author
-
-def email(author):
-    '''get email of author.'''
-    r = author.find('>')
-    if r == -1: r = None
-    return author[author.find('<')+1:r]
-
-def person(author):
-    '''get name of author, or else username.'''
-    f = author.find('<')
-    if f == -1: return util.shortuser(author)
-    return author[:f].rstrip()
-
-def shortdate(date):
-    '''turn (timestamp, tzoff) tuple into iso 8631 date.'''
-    return util.datestr(date, format='%Y-%m-%d', timezone=False)
-
-def indent(text, prefix):
-    '''indent each non-empty line of text after first with prefix.'''
-    lines = text.splitlines()
-    num_lines = len(lines)
-    def indenter():
-        for i in xrange(num_lines):
-            l = lines[i]
-            if i and l.strip():
-                yield prefix
-            yield l
-            if i < num_lines - 1 or text.endswith('\n'):
-                yield '\n'
-    return "".join(indenter())
-
-def permissions(flags):
-    if "l" in flags:
-        return "lrwxrwxrwx"
-    if "x" in flags:
-        return "-rwxr-xr-x"
-    return "-rw-r--r--"
-
-common_filters = {
-    "addbreaks": nl2br,
-    "basename": os.path.basename,
-    "age": age,
-    "date": lambda x: util.datestr(x),
-    "domain": domain,
-    "email": email,
-    "escape": lambda x: cgi.escape(x, True),
-    "fill68": lambda x: fill(x, width=68),
-    "fill76": lambda x: fill(x, width=76),
-    "firstline": firstline,
-    "tabindent": lambda x: indent(x, '\t'),
-    "hgdate": hgdate,
-    "isodate": isodate,
-    "obfuscate": obfuscate,
-    "permissions": permissions,
-    "person": person,
-    "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"),
-    "short": lambda x: x[:12],
-    "shortdate": shortdate,
-    "stringify": stringify,
-    "strip": lambda x: x.strip(),
-    "urlescape": lambda x: urllib.quote(x),
-    "user": lambda x: util.shortuser(x),
-    "stringescape": lambda x: x.encode('string_escape'),
-    }
-
 def templatepath(name=None):
     '''return location of template file or directory (if no name).
     returns None if not found.'''
@@ -295,3 +138,9 @@
         if (name and os.path.exists(p)) or os.path.isdir(p):
             return os.path.normpath(p)
 
+def stringify(thing):
+    '''turn nested template iterator into string.'''
+    if hasattr(thing, '__iter__'):
+        return "".join([stringify(t) for t in thing if t is not None])
+    return str(thing)
+
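
templater.stringify(), kept here while the filters move to templatefilters.py, flattens arbitrarily nested template iterators into one string, skipping None. A minimal modern-Python sketch of the same behaviour; the isinstance(str) guard is only needed because Python 3 strings are themselves iterable, unlike the Python 2 code above:

def stringify(thing):
    # recursively join nested iterators of strings, dropping None
    if hasattr(thing, '__iter__') and not isinstance(thing, str):
        return "".join(stringify(t) for t in thing if t is not None)
    return str(thing)

print(stringify(["a", ["b", None, ("c",)], "d"]))   # abcd
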
--- a/mercurial/transaction.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/transaction.py	Wed Feb 06 19:57:52 2008 -0800
@@ -18,10 +18,6 @@
     def __init__(self, report, opener, journal, after=None):
         self.journal = None
 
-        # abort here if the journal already exists
-        if os.path.exists(journal):
-            raise AssertionError(_("journal already exists - run hg recover"))
-
         self.count = 1
         self.report = report
         self.opener = opener
--- a/mercurial/ui.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/ui.py	Wed Feb 06 19:57:52 2008 -0800
@@ -24,6 +24,8 @@
             dest.set(section, name, value)
 
 class ui(object):
+    _isatty = None
+
     def __init__(self, verbose=False, debug=False, quiet=False,
                  interactive=True, traceback=False, report_untrusted=True,
                  parentui=None):
@@ -62,6 +64,11 @@
     def __getattr__(self, key):
         return getattr(self.parentui, key)
 
+    def isatty(self):
+        if ui._isatty is None:
+            ui._isatty = sys.stdin.isatty()
+        return ui._isatty
+
     def updateopts(self, verbose=False, debug=False, quiet=False,
                    interactive=True, traceback=False, config=[]):
         for section, name, value in config:
@@ -197,14 +204,19 @@
                     pathsitems = items
                 for n, path in pathsitems:
                     if path and "://" not in path and not os.path.isabs(path):
-                        cdata.set("paths", n, os.path.join(root, path))
+                        cdata.set("paths", n,
+                                  os.path.normpath(os.path.join(root, path)))
 
         # update verbosity/interactive/report_untrusted settings
         if section is None or section == 'ui':
             if name is None or name in ('quiet', 'verbose', 'debug'):
                 self.verbosity_constraints()
             if name is None or name == 'interactive':
-                self.interactive = self.configbool("ui", "interactive", True)
+                interactive = self.configbool("ui", "interactive", None)
+                if interactive is None and self.interactive:
+                    self.interactive = self.isatty()
+                else:
+                    self.interactive = interactive
             if name is None or name == 'report_untrusted':
                 self.report_untrusted = (
                     self.configbool("ui", "report_untrusted", True))
@@ -382,17 +394,41 @@
         try: sys.stderr.flush()
         except: pass
 
-    def readline(self):
-        return sys.stdin.readline()[:-1]
+    def _readline(self, prompt=''):
+        if self.isatty():
+            try:
+                # magically add command line editing support, where
+                # available
+                import readline
+                # force demandimport to really load the module
+                readline.read_history_file
+            except ImportError:
+                pass
+        line = raw_input(prompt)
+        # When stdin is in binary mode on Windows, it can cause
+        # raw_input() to emit an extra trailing carriage return
+        if os.linesep == '\r\n' and line and line[-1] == '\r':
+            line = line[:-1]
+        return line
+
     def prompt(self, msg, pat=None, default="y"):
+        """Prompt user with msg, read response, and ensure it matches pat
+
+        If not interactive, the default is returned.
+        """
         if not self.interactive: return default
-        while 1:
-            self.write(msg, " ")
-            r = self.readline()
-            if not pat or re.match(pat, r):
-                return r
-            else:
-                self.write(_("unrecognized response\n"))
+        while True:
+            try:
+                r = self._readline(msg + ' ')
+                if not r:
+                    return default
+                if not pat or re.match(pat, r):
+                    return r
+                else:
+                    self.write(_("unrecognized response\n"))
+            except EOFError:
+                raise util.Abort(_('response expected'))
+
     def getpass(self, prompt=None, default=None):
         if not self.interactive: return default
         return getpass.getpass(prompt or _('password: '))
@@ -412,9 +448,7 @@
             f.write(text)
             f.close()
 
-            editor = (os.environ.get("HGEDITOR") or
-                    self.config("ui", "editor") or
-                    os.environ.get("EDITOR", "vi"))
+            editor = self.geteditor()
 
             util.system("%s \"%s\"" % (editor, name),
                         environ={'HGUSER': user},
@@ -436,3 +470,11 @@
         if self.traceback:
             traceback.print_exc()
         return self.traceback
+
+    def geteditor(self):
+        '''return editor to use'''
+        return (os.environ.get("HGEDITOR") or
+                self.config("ui", "editor") or
+                os.environ.get("VISUAL") or
+                os.environ.get("EDITOR", "vi"))
+
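
The reworked ui.prompt() above returns the default when the session is not interactive or the reply is empty, re-asks while the reply fails the pattern, and aborts on EOF. A toy version of those semantics; the injectable read parameter exists only so the sketch can be exercised without a terminal:

import re

def prompt(msg, pat=None, default="y", interactive=True, read=input):
    if not interactive:
        return default
    while True:
        try:
            r = read(msg + ' ')
        except EOFError:
            raise SystemExit('response expected')
        if not r:
            return default          # empty reply falls back to the default
        if not pat or re.match(pat, r):
            return r
        print("unrecognized response")

# Non-interactive callers just get the default back:
print(prompt("really delete? [yn]", pat="[yn]", interactive=False))   # y
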
--- a/mercurial/util.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/util.py	Wed Feb 06 19:57:52 2008 -0800
@@ -13,8 +13,9 @@
 """
 
 from i18n import _
-import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile
-import os, threading, time, calendar, ConfigParser, locale, glob
+import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile, strutil
+import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil
+import urlparse
 
 try:
     set = set
@@ -80,18 +81,6 @@
     """Find the length in characters of a local string"""
     return len(s.decode(_encoding, "replace"))
 
-def localsub(s, a, b=None):
-    try:
-        u = s.decode(_encoding, _encodingmode)
-        if b is not None:
-            u = u[a:b]
-        else:
-            u = u[:a]
-        return u.encode(_encoding, _encodingmode)
-    except UnicodeDecodeError, inst:
-        sub = s[max(0, inst.start-10), inst.start+10]
-        raise Abort(_("decoding near '%s': %s!") % (sub, inst))
-
 # used by parsedate
 defaultdateformats = (
     '%Y-%m-%d %H:%M:%S',
@@ -235,13 +224,7 @@
 
 def unique(g):
     """return the uniq elements of iterable g"""
-    seen = {}
-    l = []
-    for f in g:
-        if f not in seen:
-            seen[f] = 1
-            l.append(f)
-    return l
+    return dict.fromkeys(g).keys()
 
 class Abort(Exception):
     """Raised if a command needs to print an error and exit."""
@@ -279,7 +262,7 @@
     "convert a glob pattern into a regexp"
     i, n = 0, len(pat)
     res = ''
-    group = False
+    group = 0
     def peek(): return i < n and pat[i]
     while i < n:
         c = pat[i]
@@ -309,11 +292,11 @@
                     stuff = '\\' + stuff
                 res = '%s[%s]' % (res, stuff)
         elif c == '{':
-            group = True
+            group += 1
             res += '(?:'
         elif c == '}' and group:
             res += ')'
-            group = False
+            group -= 1
         elif c == ',' and group:
             res += '|'
         elif c == '\\':
@@ -345,7 +328,7 @@
         if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
             return os.path.join(root, localpath(n2))
         n2 = '/'.join((pconvert(root), n2))
-    a, b = n1.split(os.sep), n2.split('/')
+    a, b = splitpath(n1), n2.split('/')
     a.reverse()
     b.reverse()
     while a and b and a[-1] == b[-1]:
@@ -358,7 +341,7 @@
     """return the canonical path of myname, given cwd and root"""
     if root == os.sep:
         rootsep = os.sep
-    elif root.endswith(os.sep):
+    elif endswithsep(root):
         rootsep = root
     else:
         rootsep = root + os.sep
@@ -366,6 +349,7 @@
     if not os.path.isabs(name):
         name = os.path.join(root, cwd, name)
     name = os.path.normpath(name)
+    audit_path = path_auditor(root)
     if name != rootsep and name.startswith(rootsep):
         name = name[len(rootsep):]
         audit_path(name)
@@ -476,6 +460,15 @@
         try:
             pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
             return re.compile(pat).match
+        except OverflowError:
+            # We're using a Python with a tiny regex engine and we
+            # made it explode, so we'll divide the pattern list in two
+            # until it works
+            l = len(pats)
+            if l < 2:
+                raise
+            a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
+            return lambda s: a(s) or b(s)
         except re.error:
             for k, p in pats:
                 try:
@@ -540,17 +533,21 @@
 
     return (roots, match, (inc or exc or anypats) and True)
 
-_hgexecutable = 'hg'
+_hgexecutable = None
+
+def hgexecutable():
+    """return location of the 'hg' executable.
+
+    Defaults to $HG or 'hg' in the search path.
+    """
+    if _hgexecutable is None:
+        set_hgexecutable(os.environ.get('HG') or find_exe('hg', 'hg'))
+    return _hgexecutable
 
 def set_hgexecutable(path):
-    """remember location of the 'hg' executable if easily possible
-
-    path might be None or empty if hg was loaded as a module,
-    fall back to 'hg' in this case.
-    """
+    """set location of the 'hg' executable"""
     global _hgexecutable
-    if path:
-        _hgexecutable = os.path.abspath(path)
+    _hgexecutable = path
 
 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
     '''enhanced shell command execution.
@@ -577,8 +574,7 @@
     try:
         for k, v in environ.iteritems():
             os.environ[k] = py2shell(v)
-        if 'HG' not in os.environ:
-            os.environ['HG'] = _hgexecutable
+        os.environ['HG'] = hgexecutable()
         if cwd is not None and oldcwd != cwd:
             os.chdir(cwd)
         rc = os.system(cmd)
@@ -663,7 +659,7 @@
 
     if os.path.isdir(src):
         os.mkdir(dst)
-        for name in os.listdir(src):
+        for name, kind in osutil.listdir(src):
             srcname = os.path.join(src, name)
             dstname = os.path.join(dst, name)
             copyfiles(srcname, dstname, hardlink)
@@ -677,12 +673,60 @@
         else:
             shutil.copy(src, dst)
 
-def audit_path(path):
-    """Abort if path contains dangerous components"""
-    parts = os.path.normcase(path).split(os.sep)
-    if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
-        or os.pardir in parts):
-        raise Abort(_("path contains illegal component: %s") % path)
+class path_auditor(object):
+    '''ensure that a filesystem path contains no banned components.
+    the following properties of a path are checked:
+
+    - under top-level .hg
+    - starts at the root of a windows drive
+    - contains ".."
+    - traverses a symlink (e.g. a/symlink_here/b)
+    - inside a nested repository'''
+
+    def __init__(self, root):
+        self.audited = set()
+        self.auditeddir = set()
+        self.root = root
+
+    def __call__(self, path):
+        if path in self.audited:
+            return
+        normpath = os.path.normcase(path)
+        parts = splitpath(normpath)
+        if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
+            or os.pardir in parts):
+            raise Abort(_("path contains illegal component: %s") % path)
+        def check(prefix):
+            curpath = os.path.join(self.root, prefix)
+            try:
+                st = os.lstat(curpath)
+            except OSError, err:
+                # EINVAL can be raised as invalid path syntax under win32.
+                # They must be ignored because patterns can be checked too.
+                if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
+                    raise
+            else:
+                if stat.S_ISLNK(st.st_mode):
+                    raise Abort(_('path %r traverses symbolic link %r') %
+                                (path, prefix))
+                elif (stat.S_ISDIR(st.st_mode) and
+                      os.path.isdir(os.path.join(curpath, '.hg'))):
+                    raise Abort(_('path %r is inside repo %r') %
+                                (path, prefix))
+        parts.pop()
+        prefixes = []
+        for n in range(len(parts)):
+            prefix = os.sep.join(parts)
+            if prefix in self.auditeddir:
+                break
+            check(prefix)
+            prefixes.append(prefix)
+            parts.pop()
+
+        self.audited.add(path)
+        # only add prefixes to the cache after checking everything: we don't
+        # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
+        self.auditeddir.update(prefixes)
 
 def _makelock_file(info, pathname):
     ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
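
A condensed, illustrative restatement of the path_auditor checks above, without the prefix caching; the error type and function name are placeholders:

import os, stat

def audit(root, path):
    """Reject paths with illegal components, or whose prefixes traverse a
    symlink or enter a nested repository."""
    parts = os.path.normcase(path).split(os.sep)
    if os.path.splitdrive(path)[0] or parts[0] in ('.hg', '') or os.pardir in parts:
        raise ValueError("path contains illegal component: %s" % path)
    for i in range(1, len(parts)):          # every directory prefix
        prefix = os.sep.join(parts[:i])
        cur = os.path.join(root, prefix)
        try:
            st = os.lstat(cur)
        except OSError:
            continue                        # missing prefixes are fine
        if stat.S_ISLNK(st.st_mode):
            raise ValueError("path %r traverses symbolic link %r" % (path, prefix))
        if stat.S_ISDIR(st.st_mode) and os.path.isdir(os.path.join(cur, '.hg')):
            raise ValueError("path %r is inside repo %r" % (path, prefix))

# audit('/repo/root', 'foo/bar/baz.txt')
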
@@ -711,12 +755,9 @@
 
 posixfile = file
 
-def is_win_9x():
-    '''return true if run on windows 95, 98 or me.'''
-    try:
-        return sys.getwindowsversion()[3] == 1
-    except AttributeError:
-        return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
+def openhardlinks():
+    '''return true if it is safe to hold open file handles to hardlinks'''
+    return True
 
 getuser_fallback = None
 
@@ -790,13 +831,26 @@
 
     Requires a directory (like /foo/.hg)
     """
-    fh, fn = tempfile.mkstemp("", "", path)
-    os.close(fh)
-    m = os.stat(fn).st_mode
-    os.chmod(fn, m ^ 0111)
-    r = (os.stat(fn).st_mode != m)
-    os.unlink(fn)
-    return r
+
+    # VFAT on some Linux versions can flip mode, but the change doesn't
+    # persist across a FS remount. Frequently we can detect it if files
+    # are created with the exec bit on.
+
+    try:
+        EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
+        fh, fn = tempfile.mkstemp("", "", path)
+        try:
+            os.close(fh)
+            m = os.stat(fn).st_mode & 0777
+            new_file_has_exec = m & EXECFLAGS
+            os.chmod(fn, m ^ EXECFLAGS)
+            exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
+        finally:
+            os.unlink(fn)
+    except (IOError, OSError):
+        # we don't care, the user probably won't be able to commit anyway
+        return False
+    return not (new_file_has_exec or exec_flags_cannot_flip)
 
 def execfunc(path, fallback):
     '''return an is_exec() function with default to fallback'''
@@ -829,6 +883,22 @@
     """return True if patches should be applied in binary mode by default."""
     return os.name == 'nt'
 
+def endswithsep(path):
+    '''Check path ends with os.sep or os.altsep.'''
+    return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
+
+def splitpath(path):
+    '''Split path by os.sep.
+    Note that this function does not use os.altsep because it is
+    intended as a simple alternative to "path.split(os.sep)".
+    It is recommended to apply os.path.normpath() to the path before
+    using this function, if needed.'''
+    return path.split(os.sep)
+
+def gui():
+    '''Are we running in a GUI?'''
+    return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
+
 # Platform specific variants
 if os.name == 'nt':
     import msvcrt
@@ -850,7 +920,15 @@
 
         def write(self, s):
             try:
-                return self.fp.write(s)
+                # This is workaround for "Not enough space" error on
+                # writing large size of data to console.
+                limit = 16000
+                l = len(s)
+                start = 0
+                while start < l:
+                    end = start + limit
+                    self.fp.write(s[start:end])
+                    start = end
             except IOError, inst:
                 if inst.errno != 0: raise
                 self.close()
@@ -866,6 +944,16 @@
 
     sys.stdout = winstdout(sys.stdout)
 
+    def _is_win_9x():
+        '''return true if run on windows 95, 98 or me.'''
+        try:
+            return sys.getwindowsversion()[3] == 1
+        except AttributeError:
+            return 'command' in os.environ.get('comspec', '')
+
+    def openhardlinks():
+        return not _is_win_9x() and "win32api" in locals()
+
     def system_rcpath():
         try:
             return system_rcpath_win32()
@@ -891,21 +979,24 @@
             pf = pf[1:-1] # Remove the quotes
         return pf
 
+    def sshargs(sshcmd, host, user, port):
+        '''Build argument list for ssh or Plink'''
+        pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
+        args = user and ("%s@%s" % (user, host)) or host
+        return port and ("%s %s %s" % (args, pflag, port)) or args
+
     def testpid(pid):
         '''return False if pid dead, True if running or not known'''
         return True
 
-    def set_exec(f, mode):
-        pass
-
-    def set_link(f, mode):
+    def set_flags(f, flags):
         pass
 
     def set_binary(fd):
         msvcrt.setmode(fd.fileno(), os.O_BINARY)
 
     def pconvert(path):
-        return path.replace("\\", "/")
+        return '/'.join(splitpath(path))
 
     def localpath(path):
         return path.replace('/', '\\')
@@ -937,6 +1028,19 @@
             _quotere = re.compile(r'(\\*)("|\\$)')
         return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
 
+    def quotecommand(cmd):
+        """Build a command string suitable for os.popen* calls."""
+        # The extra quotes are needed because popen* runs the command
+        # through the current COMSPEC. cmd.exe suppresses enclosing quotes.
+        return '"' + cmd + '"'
+
+    def popen(command):
+        # Work around "popen spawned process may not write to stdout
+        # under windows"
+        # http://bugs.python.org/issue1366
+        command += " 2> %s" % nulldev
+        return os.popen(quotecommand(command))
+
     def explain_exit(code):
         return _("exited with status %d") % code, code
 
@@ -978,7 +1082,7 @@
     try:
         # override functions with win32 versions if possible
         from util_win32 import *
-        if not is_win_9x():
+        if not _is_win_9x():
             posixfile = posixfile_nt
     except ImportError:
         pass
@@ -986,11 +1090,15 @@
 else:
     nulldev = '/dev/null'
 
+    def lookup_reg(key, name=None, scope=None):
+        return None
+
     def rcfiles(path):
         rcs = [os.path.join(path, 'hgrc')]
         rcdir = os.path.join(path, 'hgrc.d')
         try:
-            rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
+            rcs.extend([os.path.join(rcdir, f)
+                        for f, kind in osutil.listdir(rcdir)
                         if f.endswith(".rc")])
         except OSError:
             pass
@@ -1019,41 +1127,43 @@
                 pf = pf[1:-1] # Remove the quotes
         return pf
 
+    def sshargs(sshcmd, host, user, port):
+        '''Build argument list for ssh'''
+        args = user and ("%s@%s" % (user, host)) or host
+        return port and ("%s -p %s" % (args, port)) or args
+
     def is_exec(f):
         """check whether a file is executable"""
         return (os.lstat(f).st_mode & 0100 != 0)
 
-    def set_exec(f, mode):
+    def set_flags(f, flags):
         s = os.lstat(f).st_mode
-        if (s & 0100 != 0) == mode:
+        x = "x" in flags
+        l = "l" in flags
+        if l:
+            if not stat.S_ISLNK(s):
+                # switch file to link
+                data = file(f).read()
+                os.unlink(f)
+                os.symlink(data, f)
+            # no chmod needed at this point
             return
-        if mode:
+        if stat.S_ISLNK(s):
+            # switch link to file
+            data = os.readlink(f)
+            os.unlink(f)
+            file(f, "w").write(data)
+            s = 0666 & ~_umask # avoid restatting for chmod
+
+        sx = s & 0100
+        if x and not sx:
             # Turn on +x for every +r bit when making a file executable
             # and obey umask.
             os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
-        else:
+        elif not x and sx:
+            # Turn off all +x bits
             os.chmod(f, s & 0666)
 
-    def set_link(f, mode):
-        """make a file a symbolic link/regular file
-
-        if a file is changed to a link, its contents become the link data
-        if a link is changed to a file, its link data become its contents
-        """
-
-        m = os.path.islink(f)
-        if m == bool(mode):
-            return
-
-        if mode: # switch file to link
-            data = file(f).read()
-            os.unlink(f)
-            os.symlink(data, f)
-        else:
-            data = os.readlink(f)
-            os.unlink(f)
-            file(f, "w").write(data)
-
     def set_binary(fd):
         pass
 
@@ -1090,6 +1200,12 @@
         else:
             return "'%s'" % s.replace("'", "'\\''")
 
+    def quotecommand(cmd):
+        return cmd
+
+    def popen(command):
+        return os.popen(command)
+
     def testpid(pid):
         '''return False if pid dead, True if running or not sure'''
         if os.sys.platform == 'OpenVMS':
@@ -1199,7 +1315,7 @@
     # what we want.  If the original file already exists, just copy
     # its mode.  Otherwise, manually obey umask.
     try:
-        st_mode = os.lstat(name).st_mode
+        st_mode = os.lstat(name).st_mode & 0777
     except OSError, inst:
         if inst.errno != errno.ENOENT:
             raise
@@ -1259,7 +1375,10 @@
     """
     def __init__(self, base, audit=True):
         self.base = base
-        self.audit = audit
+        if audit:
+            self.audit_path = path_auditor(base)
+        else:
+            self.audit_path = always
 
     def __getattr__(self, name):
         if name == '_can_symlink':
@@ -1268,8 +1387,7 @@
         raise AttributeError(name)
 
     def __call__(self, path, mode="r", text=False, atomictemp=False):
-        if self.audit:
-            audit_path(path)
+        self.audit_path(path)
         f = os.path.join(self.base, path)
 
         if not text and "b" not in mode:
@@ -1290,8 +1408,7 @@
         return posixfile(f, mode)
 
     def symlink(self, src, dst):
-        if self.audit:
-            audit_path(dst)
+        self.audit_path(dst)
         linkname = os.path.join(self.base, dst)
         try:
             os.unlink(linkname)
@@ -1309,7 +1426,7 @@
                 raise OSError(err.errno, _('could not symlink to %r: %s') %
                               (src, err.strerror), linkname)
         else:
-            f = self(self, dst, "w")
+            f = self(dst, "w")
             f.write(src)
             f.close()
 
@@ -1317,45 +1434,34 @@
     """Allow arbitrary sized chunks of data to be efficiently read from an
     iterator over chunks of arbitrary size."""
 
-    def __init__(self, in_iter, targetsize = 2**16):
+    def __init__(self, in_iter):
         """in_iter is the iterator that's iterating over the input chunks.
         targetsize is how big a buffer to try to maintain."""
-        self.in_iter = iter(in_iter)
+        self.iter = iter(in_iter)
         self.buf = ''
-        self.targetsize = int(targetsize)
-        if self.targetsize <= 0:
-            raise ValueError(_("targetsize must be greater than 0, was %d") %
-                             targetsize)
-        self.iterempty = False
-
-    def fillbuf(self):
-        """Ignore target size; read every chunk from iterator until empty."""
-        if not self.iterempty:
-            collector = cStringIO.StringIO()
-            collector.write(self.buf)
-            for ch in self.in_iter:
-                collector.write(ch)
-            self.buf = collector.getvalue()
-            self.iterempty = True
+        self.targetsize = 2**16
 
     def read(self, l):
         """Read L bytes of data from the iterator of chunks of data.
         Returns less than L bytes if the iterator runs dry."""
-        if l > len(self.buf) and not self.iterempty:
+        if l > len(self.buf) and self.iter:
             # Clamp to a multiple of self.targetsize
-            targetsize = self.targetsize * ((l // self.targetsize) + 1)
+            targetsize = max(l, self.targetsize)
             collector = cStringIO.StringIO()
             collector.write(self.buf)
             collected = len(self.buf)
-            for chunk in self.in_iter:
+            for chunk in self.iter:
                 collector.write(chunk)
                 collected += len(chunk)
                 if collected >= targetsize:
                     break
             if collected < targetsize:
-                self.iterempty = True
+                self.iter = False
             self.buf = collector.getvalue()
-        s, self.buf = self.buf[:l], buffer(self.buf, l)
+        if len(self.buf) == l:
+            s, self.buf = str(self.buf), ''
+        else:
+            s, self.buf = self.buf[:l], buffer(self.buf, l)
         return s
 
 def filechunkiter(f, size=65536, limit=None):
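
The reworked chunkbuffer.read() above keeps pulling chunks from the iterator until at least the requested number of bytes is buffered, then hands back exactly that many (or fewer if the iterator runs dry). The same behaviour as a small self-contained toy (names are illustrative):

class ChunkBuffer(object):
    def __init__(self, in_iter):
        self.iter = iter(in_iter)
        self.buf = b''

    def read(self, l):
        # buffer chunks until we can satisfy the request or run dry
        while len(self.buf) < l and self.iter is not None:
            try:
                self.buf += next(self.iter)
            except StopIteration:
                self.iter = None
        s, self.buf = self.buf[:l], self.buf[l:]
        return s

cb = ChunkBuffer([b'abc', b'defgh', b'i'])
print(cb.read(4))    # b'abcd'
print(cb.read(10))   # b'efghi' (iterator ran dry)
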
@@ -1383,7 +1489,7 @@
         tz = time.timezone
     return time.mktime(lt), tz
 
-def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
+def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True, timezone_format=" %+03d%02d"):
     """represent a (unixtime, offset) tuple as a localized time.
     unixtime is seconds since the epoch, and offset is the time zone's
     number of seconds away from UTC. if timezone is false, do not
@@ -1391,10 +1497,10 @@
     t, tz = date or makedate()
     s = time.strftime(format, time.gmtime(float(t) - tz))
     if timezone:
-        s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
+        s += timezone_format % (-tz / 3600, ((-tz % 3600) / 60))
     return s
 
-def strdate(string, format, defaults):
+def strdate(string, format, defaults=[]):
     """parse a localized time string and return a (unixtime, offset) tuple.
     if the string cannot be parsed, ValueError is raised."""
     def timezone(string):
@@ -1537,6 +1643,12 @@
         user = user[:f]
     return user
 
+def email(author):
+    '''get email of author.'''
+    r = author.find('>')
+    if r == -1: r = None
+    return author[author.find('<')+1:r]
+
 def ellipsis(text, maxlength=400):
     """Trim string to at most maxlength (default: 400) characters."""
     if len(text) <= maxlength:
@@ -1579,7 +1691,7 @@
             for p in os.environ['HGRCPATH'].split(os.pathsep):
                 if not p: continue
                 if os.path.isdir(p):
-                    for f in os.listdir(p):
+                    for f, kind in osutil.listdir(p):
                         if f.endswith('.rc'):
                             _rcpath.append(os.path.join(p, f))
                 else:
@@ -1616,3 +1728,19 @@
         if path.startswith('//'):
             path = path[2:]
     return path
+
+def uirepr(s):
+    # Avoid double backslash in Windows path repr()
+    return repr(s).replace('\\\\', '\\')
+
+def hidepassword(url):
+    '''hide user credential in a url string'''
+    scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
+    netloc = re.sub('([^:]*):([^@]*)@(.*)', r'\1:***@\3', netloc)
+    return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
+
+def removeauth(url):
+    '''remove all authentication information from a url string'''
+    scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
+    netloc = netloc[netloc.find('@')+1:]
+    return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
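
The new hidepassword() and removeauth() helpers above only rewrite the netloc of a parsed URL. The same idea spelled with the Python 3 stdlib (urllib.parse in place of urlparse), as a quick check:

import re
from urllib.parse import urlparse, urlunparse

def hidepassword(url):
    scheme, netloc, path, params, query, fragment = urlparse(url)
    netloc = re.sub(r'([^:]*):([^@]*)@(.*)', r'\1:***@\3', netloc)
    return urlunparse((scheme, netloc, path, params, query, fragment))

def removeauth(url):
    scheme, netloc, path, params, query, fragment = urlparse(url)
    netloc = netloc[netloc.find('@') + 1:]
    return urlunparse((scheme, netloc, path, params, query, fragment))

print(hidepassword('http://user:secret@example.com/repo'))   # http://user:***@example.com/repo
print(removeauth('http://user:secret@example.com/repo'))     # http://example.com/repo
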
--- a/mercurial/util_win32.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/util_win32.py	Wed Feb 06 19:57:52 2008 -0800
@@ -16,6 +16,7 @@
 from i18n import _
 import errno, os, pywintypes, win32con, win32file, win32process
 import cStringIO, winerror
+import osutil
 from win32com.shell import shell,shellcon
 
 class WinError:
@@ -146,9 +147,18 @@
                          self.win_strerror)
 
 def os_link(src, dst):
-    # NB will only succeed on NTFS
     try:
         win32file.CreateHardLink(dst, src)
+        # CreateHardLink sometimes succeeds on mapped drives but a
+        # following nlinks() call returns 1. Check it now and bail out.
+        if nlinks(src) < 2:
+            try:
+                win32file.DeleteFile(dst)
+            except:
+                pass
+            # Fake hardlinking error
+            raise WinOSError((18, 'CreateHardLink', 'The system cannot '
+                              'move the file to a different disk drive'))
     except pywintypes.error, details:
         raise WinOSError(details)
 
@@ -177,6 +187,37 @@
         return details[0] != winerror.ERROR_INVALID_PARAMETER
     return True
 
+def lookup_reg(key, valname=None, scope=None):
+    ''' Look up a key/value name in the Windows registry.
+
+    valname: value name. If unspecified, the default value for the key
+    is used.
+    scope: optionally specify scope for registry lookup; this can be
+    a sequence of scopes to look up in order. Defaults to
+    (CURRENT_USER, LOCAL_MACHINE).
+    '''
+    try:
+        from _winreg import HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE, \
+            QueryValueEx, OpenKey
+    except ImportError:
+        return None
+
+    def query_val(scope, key, valname):
+        try:
+            keyhandle = OpenKey(scope, key)
+            return QueryValueEx(keyhandle, valname)[0]
+        except EnvironmentError:
+            return None
+
+    if scope is None:
+        scope = (HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE)
+    elif not isinstance(scope, (list, tuple)):
+        scope = (scope,)
+    for s in scope:
+        val = query_val(s, key, valname)
+        if val is not None:
+            return val
+
 def system_rcpath_win32():
     '''return default os-specific hgrc search path'''
     proc = win32api.GetCurrentProcess()
@@ -185,12 +226,30 @@
         filename = win32process.GetModuleFileNameEx(proc, 0)
     except:
         filename = win32api.GetModuleFileName(0)
-    return [os.path.join(os.path.dirname(filename), 'mercurial.ini')]
+    # Use mercurial.ini found in directory with hg.exe
+    progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
+    if os.path.isfile(progrc):
+        return [progrc]
+    # else look for a system rcpath in the registry
+    try:
+        value = win32api.RegQueryValue(
+                win32con.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Mercurial')
+        rcpath = []
+        for p in value.split(os.pathsep):
+            if p.lower().endswith('mercurial.ini'):
+                rcpath.append(p)
+            elif os.path.isdir(p):
+                for f, kind in osutil.listdir(p):
+                    if f.endswith('.rc'):
+                        rcpath.append(os.path.join(p, f))
+        return rcpath
+    except pywintypes.error:
+        return []
 
 def user_rcpath_win32():
     '''return os-specific hgrc search path to the user dir'''
     userdir = os.path.expanduser('~')
-    if userdir == '~':
+    if sys.getwindowsversion()[3] != 2 and userdir == '~':
         # We are on win < nt: fetch the APPDATA directory location and use
         # the parent directory as the user home dir.
         appdir = shell.SHGetPathFromIDList(
@@ -208,6 +267,9 @@
     # but does not work at all. wrap win32 file api instead.
 
     def __init__(self, name, mode='rb'):
+        self.closed = False
+        self.name = name
+        self.mode = mode
         access = 0
         if 'r' in mode or '+' in mode:
             access |= win32file.GENERIC_READ
@@ -231,9 +293,6 @@
                                                0)
         except pywintypes.error, err:
             raise WinIOError(err, name)
-        self.closed = False
-        self.name = name
-        self.mode = mode
 
     def __iter__(self):
         for line in self.read().splitlines(True):
@@ -266,6 +325,10 @@
         except pywintypes.error, err:
             raise WinIOError(err)
 
+    def writelines(self, sequence):
+        for s in sequence:
+            self.write(s)
+
     def seek(self, pos, whence=0):
         try:
             win32file.SetFilePointer(self.handle, int(pos), whence)
@@ -285,10 +348,8 @@
             self.closed = True
 
     def flush(self):
-        try:
-            win32file.FlushFileBuffers(self.handle)
-        except pywintypes.error, err:
-            raise WinIOError(err)
+        # we have no application-level buffering
+        pass
 
     def truncate(self, pos=0):
         try:
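
For orientation, here is a minimal usage sketch for the new lookup_reg() helper added above. It is only an illustration: the value name below is hypothetical, and it assumes a Windows host with pywin32/_winreg available so that mercurial.util_win32 imports.

    # Illustrative only: the 'InstallDir' value name is hypothetical.
    from mercurial import util_win32

    # Searches HKEY_CURRENT_USER, then HKEY_LOCAL_MACHINE (the default scopes)
    # and returns the first value found, or None.
    val = util_win32.lookup_reg('Software\\Mercurial', 'InstallDir')
    if val is None:
        print('no registry entry found')
    else:
        print('InstallDir = %s' % val)
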
--- a/mercurial/verify.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/verify.py	Wed Feb 06 19:57:52 2008 -0800
@@ -7,7 +7,7 @@
 
 from node import *
 from i18n import _
-import revlog, mdiff
+import revlog
 
 def verify(repo):
     lock = repo.lock()
@@ -20,12 +20,23 @@
     filelinkrevs = {}
     filenodes = {}
     changesets = revisions = files = 0
+    firstbad = [None]
     errors = [0]
     warnings = [0]
     neededmanifests = {}
 
-    def err(msg):
-        repo.ui.warn(msg + "\n")
+    def err(linkrev, msg, filename=None):
+        if linkrev != None:
+            if firstbad[0] != None:
+                firstbad[0] = min(firstbad[0], linkrev)
+            else:
+                firstbad[0] = linkrev
+        else:
+            linkrev = "?"
+        msg = "%s: %s" % (linkrev, msg)
+        if filename:
+            msg = "%s@%s" % (filename, msg)
+        repo.ui.warn(" " + msg + "\n")
         errors[0] += 1
 
     def warn(msg):
@@ -35,9 +46,9 @@
     def checksize(obj, name):
         d = obj.checksize()
         if d[0]:
-            err(_("%s data length off by %d bytes") % (name, d[0]))
+            err(None, _("data length off by %d bytes") % d[0], name)
         if d[1]:
-            err(_("%s index contains %d extra bytes") % (name, d[1]))
+            err(None, _("index contains %d extra bytes") % d[1], name)
 
     def checkversion(obj, name):
         if obj.version != revlog.REVLOGV0:
@@ -51,121 +62,158 @@
         repo.ui.status(_("repository uses revlog format %d\n") %
                        (revlogv1 and 1 or 0))
 
+    havecl = havemf = 1
     seen = {}
     repo.ui.status(_("checking changesets\n"))
-    checksize(repo.changelog, "changelog")
+    if repo.changelog.count() == 0 and repo.manifest.count() > 1:
+        havecl = 0
+        err(0, _("empty or missing 00changelog.i"))
+    else:
+        checksize(repo.changelog, "changelog")
 
     for i in xrange(repo.changelog.count()):
         changesets += 1
         n = repo.changelog.node(i)
         l = repo.changelog.linkrev(n)
         if l != i:
-            err(_("incorrect link (%d) for changeset revision %d") %(l, i))
+            err(i, _("incorrect link (%d) for changeset") %(l))
         if n in seen:
-            err(_("duplicate changeset at revision %d") % i)
-        seen[n] = 1
+            err(i, _("duplicates changeset at revision %d") % seen[n])
+        seen[n] = i
 
         for p in repo.changelog.parents(n):
             if p not in repo.changelog.nodemap:
-                err(_("changeset %s has unknown parent %s") %
-                             (short(n), short(p)))
+                err(i, _("changeset has unknown parent %s") % short(p))
         try:
             changes = repo.changelog.read(n)
         except KeyboardInterrupt:
             repo.ui.warn(_("interrupted"))
             raise
         except Exception, inst:
-            err(_("unpacking changeset %s: %s") % (short(n), inst))
+            err(i, _("unpacking changeset: %s") % inst)
             continue
 
-        neededmanifests[changes[0]] = n
+        if changes[0] not in neededmanifests:
+            neededmanifests[changes[0]] = i
 
         for f in changes[3]:
             filelinkrevs.setdefault(f, []).append(i)
 
     seen = {}
     repo.ui.status(_("checking manifests\n"))
-    checkversion(repo.manifest, "manifest")
-    checksize(repo.manifest, "manifest")
+    if repo.changelog.count() > 0 and repo.manifest.count() == 0:
+        havemf = 0
+        err(0, _("empty or missing 00manifest.i"))
+    else:
+        checkversion(repo.manifest, "manifest")
+        checksize(repo.manifest, "manifest")
 
     for i in xrange(repo.manifest.count()):
         n = repo.manifest.node(i)
         l = repo.manifest.linkrev(n)
 
-        if l < 0 or l >= repo.changelog.count():
-            err(_("bad manifest link (%d) at revision %d") % (l, i))
+        if l < 0 or (havecl and l >= repo.changelog.count()):
+            err(None, _("bad link (%d) at manifest revision %d") % (l, i))
 
         if n in neededmanifests:
             del neededmanifests[n]
 
         if n in seen:
-            err(_("duplicate manifest at revision %d") % i)
+            err(l, _("duplicates manifest from %d") % seen[n])
 
-        seen[n] = 1
+        seen[n] = l
 
         for p in repo.manifest.parents(n):
             if p not in repo.manifest.nodemap:
-                err(_("manifest %s has unknown parent %s") %
-                    (short(n), short(p)))
+                err(l, _("manifest has unknown parent %s") % short(p))
 
         try:
             for f, fn in repo.manifest.readdelta(n).iteritems():
-                filenodes.setdefault(f, {})[fn] = 1
+                fns = filenodes.setdefault(f, {})
+                if fn not in fns:
+                    fns[fn] = n
         except KeyboardInterrupt:
             repo.ui.warn(_("interrupted"))
             raise
         except Exception, inst:
-            err(_("reading delta for manifest %s: %s") % (short(n), inst))
+            err(l, _("reading manifest delta: %s") % inst)
             continue
 
     repo.ui.status(_("crosschecking files in changesets and manifests\n"))
 
-    for m, c in neededmanifests.items():
-        err(_("Changeset %s refers to unknown manifest %s") %
-            (short(m), short(c)))
-    del neededmanifests
+    if havemf > 0:
+        nm = [(c, m) for m, c in neededmanifests.items()]
+        nm.sort()
+        for c, m in nm:
+            err(c, _("changeset refers to unknown manifest %s") % short(m))
+        del neededmanifests, nm
 
-    for f in filenodes:
-        if f not in filelinkrevs:
-            err(_("file %s in manifest but not in changesets") % f)
+    if havecl:
+        fl = filenodes.keys()
+        fl.sort()
+        for f in fl:
+            if f not in filelinkrevs:
+                lrs = [repo.manifest.linkrev(n) for n in filenodes[f]]
+                lrs.sort()
+                err(lrs[0], _("in manifest but not in changeset"), f)
+        del fl
 
-    for f in filelinkrevs:
-        if f not in filenodes:
-            err(_("file %s in changeset but not in manifest") % f)
+    if havemf:
+        fl = filelinkrevs.keys()
+        fl.sort()
+        for f in fl:
+            if f not in filenodes:
+                lr = filelinkrevs[f][0]
+                err(lr, _("in changeset but not in manifest"), f)
+        del fl
 
     repo.ui.status(_("checking files\n"))
-    ff = filenodes.keys()
+    ff = dict.fromkeys(filenodes.keys() + filelinkrevs.keys()).keys()
     ff.sort()
     for f in ff:
         if f == "/dev/null":
             continue
         files += 1
         if not f:
-            err(_("file without name in manifest %s") % short(n))
+            lr = filelinkrevs[f][0]
+            err(lr, _("file without name in manifest"))
             continue
         fl = repo.file(f)
         checkversion(fl, f)
         checksize(fl, f)
 
+        if fl.count() == 0:
+            err(filelinkrevs[f][0], _("empty or missing revlog"), f)
+            continue
+
+        seen = {}
         nodes = {nullid: 1}
-        seen = {}
         for i in xrange(fl.count()):
             revisions += 1
             n = fl.node(i)
+            flr = fl.linkrev(n)
+
+            if flr < 0 or (havecl and flr not in filelinkrevs.get(f, [])):
+                if flr < 0 or flr >= repo.changelog.count():
+                    err(None, _("rev %d points to nonexistent changeset %d")
+                        % (i, flr), f)
+                else:
+                    err(None, _("rev %d points to unexpected changeset %d")
+                        % (i, flr), f)
+                if f in filelinkrevs:
+                    warn(_(" (expected %s)") % filelinkrevs[f][0])
+                flr = None # can't be trusted
+            else:
+                if havecl:
+                    filelinkrevs[f].remove(flr)
 
             if n in seen:
-                err(_("%s: duplicate revision %d") % (f, i))
-            if n not in filenodes[f]:
-                err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
-            else:
-                del filenodes[f][n]
-
-            flr = fl.linkrev(n)
-            if flr not in filelinkrevs.get(f, []):
-                err(_("%s:%s points to unexpected changeset %d")
-                        % (f, short(n), flr))
-            else:
-                filelinkrevs[f].remove(flr)
+                err(flr, _("duplicate revision %d") % i, f)
+            if f in filenodes:
+                if havemf and n not in filenodes[f]:
+                    err(flr, _("%s not in manifests") % (short(n)), f)
+                else:
+                    del filenodes[f][n]
 
             # verify contents
             try:
@@ -174,16 +222,22 @@
                 repo.ui.warn(_("interrupted"))
                 raise
             except Exception, inst:
-                err(_("unpacking file %s %s: %s") % (f, short(n), inst))
+                err(flr, _("unpacking %s: %s") % (short(n), inst), f)
 
             # verify parents
-            (p1, p2) = fl.parents(n)
-            if p1 not in nodes:
-                err(_("file %s:%s unknown parent 1 %s") %
-                    (f, short(n), short(p1)))
-            if p2 not in nodes:
-                err(_("file %s:%s unknown parent 2 %s") %
-                        (f, short(n), short(p1)))
+            try:
+                (p1, p2) = fl.parents(n)
+                if p1 not in nodes:
+                    err(flr, _("unknown parent 1 %s of %s") %
+                        (short(p1), short(n)), f)
+                if p2 not in nodes:
+                    err(flr, _("unknown parent 2 %s of %s") %
+                        (short(p2), short(n)), f)
+            except KeyboardInterrupt:
+                repo.ui.warn(_("interrupted"))
+                raise
+            except Exception, inst:
+                err(flr, _("checking parents of %s: %s") % (short(n), inst), f)
             nodes[n] = 1
 
             # check renames
@@ -196,11 +250,16 @@
                 repo.ui.warn(_("interrupted"))
                 raise
             except Exception, inst:
-                err(_("checking rename on file %s %s: %s") % (f, short(n), inst))
+                err(flr, _("checking rename of %s: %s") %
+                    (short(n), inst), f)
 
         # cross-check
-        for node in filenodes[f]:
-            err(_("node %s in manifests not in %s") % (hex(node), f))
+        if f in filenodes:
+            fns = [(repo.manifest.linkrev(filenodes[f][n]), n)
+                   for n in filenodes[f]]
+            fns.sort()
+            for lr, node in fns:
+                err(lr, _("%s in manifests not found") % short(node), f)
 
     repo.ui.status(_("%d files, %d changesets, %d total revisions\n") %
                    (files, changesets, revisions))
@@ -209,5 +268,7 @@
         repo.ui.warn(_("%d warnings encountered!\n") % warnings[0])
     if errors[0]:
         repo.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
+        if firstbad[0]:
+            repo.ui.warn(_("(first damaged changeset appears to be %d)\n")
+                         % firstbad[0])
         return 1
-
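
As a plain-Python illustration of the new reporting convention in verify.py: err() now prefixes each message with the offending linkrev (or "?" when it is unknown) and, when a file is involved, with "file@"; the smallest linkrev seen is also tracked so the summary can name the first damaged changeset. A self-contained sketch of just the message formatting:

    def format_verify_error(linkrev, msg, filename=None):
        # Mirror the "file@linkrev: message" layout used by err() above;
        # "?" stands in when the linkrev is unknown.
        rev = linkrev if linkrev is not None else "?"
        line = "%s: %s" % (rev, msg)
        if filename:
            line = "%s@%s" % (filename, line)
        return " " + line

    print(format_verify_error(3, "in manifest but not in changeset", "foo.c"))
    # prints: " foo.c@3: in manifest but not in changeset"
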
--- a/mercurial/version.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/mercurial/version.py	Wed Feb 06 19:57:52 2008 -0800
@@ -1,4 +1,4 @@
-# Copyright (C) 2005, 2006 by Intevation GmbH
+# Copyright (C) 2005, 2006, 2008 by Intevation GmbH
 # Author(s):
 # Thomas Arendsen Hein <thomas@intevation.de>
 #
@@ -10,7 +10,6 @@
 """
 
 import os
-import os.path
 import re
 import time
 import util
@@ -50,7 +49,7 @@
     """Store version information."""
     global remembered_version
     if not version and os.path.isdir(".hg"):
-        f = os.popen("hg identify 2> %s" % util.nulldev)  # use real hg installation
+        f = util.popen("hg identify")  # use real hg installation
         ident = f.read()[:-1]
         if not f.close() and ident:
             ids = ident.split(' ', 1)
--- a/setup.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/setup.py	Wed Feb 06 19:57:52 2008 -0800
@@ -14,8 +14,6 @@
 from distutils.command.install_data import install_data
 
 import mercurial.version
-import mercurial.demandimport
-mercurial.demandimport.enable = lambda: None
 
 extra = {}
 
@@ -54,6 +52,19 @@
 mercurial.version.remember_version(version)
 cmdclass = {'install_data': install_package_data}
 
+ext_modules=[
+    Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
+    Extension('mercurial.bdiff', ['mercurial/bdiff.c']),
+    Extension('mercurial.base85', ['mercurial/base85.c']),
+    Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c'])
+    ]
+
+try:
+    import posix
+    ext_modules.append(Extension('mercurial.osutil', ['mercurial/osutil.c']))
+except ImportError:
+    pass
+
 setup(name='mercurial',
       version=mercurial.version.get_version(),
       author='Matt Mackall',
@@ -61,16 +72,13 @@
       url='http://selenic.com/mercurial',
       description='Scalable distributed SCM',
       license='GNU GPL',
+      scripts=['hg'],
       packages=['mercurial', 'mercurial.hgweb', 'hgext', 'hgext.convert'],
-      ext_modules=[Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
-                   Extension('mercurial.bdiff', ['mercurial/bdiff.c']),
-                   Extension('mercurial.base85', ['mercurial/base85.c']),
-                   Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c'])],
+      ext_modules=ext_modules,
       data_files=[(os.path.join('mercurial', root),
                    [os.path.join(root, file_) for file_ in files])
                   for root, dirs, files in os.walk('templates')],
       cmdclass=cmdclass,
-      scripts=['hg', 'hgmerge'],
       options=dict(py2exe=dict(packages=['hgext']),
                    bdist_mpkg=dict(zipdist=True,
                                    license='COPYING',
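
The setup.py change registers the mercurial.osutil C extension only where the posix module imports; on platforms without posix the ImportError simply leaves the extension list unchanged. A condensed, standalone restatement of that gating, assuming only distutils:

    # Sketch of the platform gating used above: osutil is appended to
    # ext_modules only on platforms where the posix module exists.
    from distutils.core import Extension

    ext_modules = [Extension('mercurial.mpatch', ['mercurial/mpatch.c'])]
    try:
        import posix  # present on POSIX systems, absent on Windows
        ext_modules.append(Extension('mercurial.osutil', ['mercurial/osutil.c']))
    except ImportError:
        pass
    print([e.name for e in ext_modules])
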
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/atom/changelog.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,10 @@
+#header#
+ <!-- Changelog -->
+ <id>{urlbase}{url}</id>
+ <link rel="self" href="{urlbase}{url}atom-log"/>
+ <link rel="alternate" href="{urlbase}{url}"/>
+ <title>#repo|escape# Changelog</title>
+ #latestentry%feedupdated#
+
+#entries%changelogentry#
+</feed>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/atom/changelogentry.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,16 @@
+ <entry>
+  <title>#desc|strip|firstline|strip|escape#</title>
+  <id>http://www.selenic.com/mercurial/#changeset-{node}</id>
+  <link href="{urlbase}{url}rev/{node}"/>
+  <author>
+   <name>#author|person|escape#</name>
+   <email>#author|email|obfuscate#</email>
+  </author>
+  <updated>#date|rfc3339date#</updated>
+  <published>#date|rfc3339date#</published>
+  <content type="xhtml">
+   <div xmlns="http://www.w3.org/1999/xhtml">
+    <pre xml:space="preserve">#desc|escape#</pre>
+   </div>
+  </content>
+ </entry>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/atom/filelog.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,8 @@
+#header#
+ <id>{urlbase}{url}atom-log/tip/{file|escape}</id>
+ <link rel="self" href="{urlbase}{url}atom-log/tip/{file|escape}"/>
+ <title>#repo|escape#: #file|escape# history</title>
+ #latestentry%feedupdated#
+
+#entries%changelogentry#
+</feed>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/atom/header.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,2 @@
+<?xml version="1.0" encoding="{encoding}"?>
+<feed xmlns="http://www.w3.org/2005/Atom">
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/atom/map	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,10 @@
+default = 'changelog'
+feedupdated = '<updated>#date|rfc3339date#</updated>'
+mimetype = 'application/atom+xml; charset={encoding}'
+header = header.tmpl
+changelog = changelog.tmpl
+changelogentry = changelogentry.tmpl
+filelog = filelog.tmpl
+filelogentry = filelogentry.tmpl
+tags = tags.tmpl
+tagentry = tagentry.tmpl
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/atom/tagentry.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,8 @@
+ <entry>
+  <title>#tag|escape#</title>
+  <link rel="alternate" href="{urlbase}{url}rev/{node}"/>
+  <id>http://www.selenic.com/mercurial/#tag-{node}</id>
+  <updated>#date|rfc3339date#</updated>
+  <published>#date|rfc3339date#</published>
+  <content type="text">#tag|strip|escape#</content>
+ </entry>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/atom/tags.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,11 @@
+#header#
+ <id>{urlbase}{url}</id>
+ <link rel="self" href="{urlbase}{url}atom-tags"/>
+ <link rel="alternate" href="{urlbase}{url}tags"/>
+ <title>#repo|escape#: tags</title>
+ <summary>#repo|escape# tag history</summary>
+ <author><name>Mercurial SCM</name></author>
+ #latestentry%feedupdated#
+
+#entriesnotip%tagentry#
+</feed>
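
With these templates in place, every repository served by hgweb exposes {url}atom-log and {url}atom-tags feeds alongside the existing RSS ones. A small consumer-side sketch (the repository URL is hypothetical; assumes Python 2.5+ for xml.etree):

    # Fetch the new atom-log feed and list the entry titles. URL is illustrative.
    import urllib2
    import xml.etree.ElementTree as ET

    ATOM = '{http://www.w3.org/2005/Atom}'
    feed = urllib2.urlopen('http://hg.example.com/repo/atom-log').read()
    root = ET.fromstring(feed)
    for entry in root.findall(ATOM + 'entry'):
        print(entry.find(ATOM + 'title').text)
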
--- a/templates/changelog.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/changelog.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: changelog</title>
+<link rel="alternate" type="application/atom+xml"
+   href="#url#atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="#url#rss-log" title="RSS feed for #repo|escape#">
 </head>
@@ -11,6 +13,7 @@
 <a href="#url#file/#node|short#{sessionvars%urlparameter}">manifest</a>
 #archives%archiveentry#
 <a type="application/rss+xml" href="#url#rss-log">rss</a>
+<a type="application/atom+xml" href="#url#atom-log" title="Atom feed for #repo|escape#">atom</a>
 </div>
 
 <h2>changelog for #repo|escape#</h2>
--- a/templates/error.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/error.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -6,7 +6,7 @@
 <h2>Mercurial Error</h2>
 
 <p>
-An error occured while processing your request:
+An error occurred while processing your request:
 </p>
 <p>
 #error|escape#
--- a/templates/filelog.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/filelog.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: #file|escape# history</title>
+<link rel="alternate" type="application/atom+xml"
+   href="#url#atom-log/tip/#file|urlescape#" title="Atom feed for #repo|escape#:#file#">
 <link rel="alternate" type="application/rss+xml"
    href="#url#rss-log/tip/#file|urlescape#" title="RSS feed for #repo|escape#:#file#">
 </head>
@@ -13,6 +15,7 @@
 <a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a>
 <a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a>
 <a type="application/rss+xml" href="#url#rss-log/tip/#file|urlescape#">rss</a>
+<a type="application/atom+xml" href="#url#atom-log/tip/#file|urlescape#" title="Atom feed for #repo|escape#:#file#">atom</a>
 </div>
 
 <h2>#file|escape# revision history</h2>
--- a/templates/gitweb/changelog.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/gitweb/changelog.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,12 +1,14 @@
 #header#
 <title>#repo|escape#: Changelog</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#"/>
 <link rel="alternate" type="application/rss+xml"
-   href="{url}rss-log" title="RSS feed for #repo|escape#">
+   href="{url}rss-log" title="RSS feed for #repo|escape#"/>
 </head>
 <body>
 
 <div class="page_header">
-<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / changelog
+<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / changelog
 </div>
 
 <form action="{url}log">
@@ -15,7 +17,6 @@
 <input type="text" name="rev"  />
 </div>
 </form>
-</div>
 
 <div class="page_nav">
 <a href="{url}summary{sessionvars%urlparameter}">summary</a> | <a href="{url}shortlog/#rev#{sessionvars%urlparameter}">shortlog</a> | changelog | <a href="{url}tags{sessionvars%urlparameter}">tags</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a>#archives%archiveentry#
--- a/templates/gitweb/changeset.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/gitweb/changeset.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,12 +1,14 @@
 #header#
 <title>{repo|escape}: changeset {rev}:{node|short}</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#"/>
 <link rel="alternate" type="application/rss+xml"
-   href="{url}rss-log" title="RSS feed for #repo|escape#">
+   href="{url}rss-log" title="RSS feed for #repo|escape#"/>
 </head>
 <body>
 
 <div class="page_header">
-<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="#url#summary{sessionvars%urlparameter}">#repo|escape#</a> / changeset
+<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="#url#summary{sessionvars%urlparameter}">#repo|escape#</a> / changeset
 </div>
 
 <div class="page_nav">
--- a/templates/gitweb/error.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/gitweb/error.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,12 +1,14 @@
 #header#
 <title>#repo|escape#: Error</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#"/>
 <link rel="alternate" type="application/rss+xml"
-   href="{url}rss-log" title="RSS feed for #repo|escape#">
+   href="{url}rss-log" title="RSS feed for #repo|escape#"/>
 </head>
 <body>
 
 <div class="page_header">
-<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / error
+<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / error
 </div>
 
 <div class="page_nav">
@@ -15,7 +17,7 @@
 
 <div class="page_body">
 <br/>
-<i>An error occured while processing your request</i><br/>
+<i>An error occurred while processing your request</i><br/>
 <br/>
 {error|escape}
 </div>
--- a/templates/gitweb/fileannotate.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/gitweb/fileannotate.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,12 +1,14 @@
 #header#
 <title>{repo|escape}: {file|escape}@{node|short} (annotated)</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#"/>
 <link rel="alternate" type="application/rss+xml"
-   href="{url}rss-log" title="RSS feed for #repo|escape#">
+   href="{url}rss-log" title="RSS feed for #repo|escape#"/>
 </head>
 <body>
 
 <div class="page_header">
-<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / annotate
+<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / annotate
 </div>
 
 <div class="page_nav">
--- a/templates/gitweb/filediff.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/gitweb/filediff.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,12 +1,14 @@
 {header}
 <title>{repo|escape}: diff {file|escape}</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#"/>
 <link rel="alternate" type="application/rss+xml"
-   href="{url}rss-log" title="RSS feed for {repo|escape}">
+   href="{url}rss-log" title="RSS feed for {repo|escape}"/>
 </head>
 <body>
 
 <div class="page_header">
-<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / diff
+<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / diff
 </div>
 
 <div class="page_nav">
--- a/templates/gitweb/filelog.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/gitweb/filelog.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,12 +1,14 @@
 #header#
 <title>#repo|escape#: File revisions</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#"/>
 <link rel="alternate" type="application/rss+xml"
-   href="{url}rss-log" title="RSS feed for #repo|escape#">
+   href="{url}rss-log" title="RSS feed for #repo|escape#"/>
 </head>
 <body>
 
 <div class="page_header">
-<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / file revisions
+<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / file revisions
 </div>
 
 <div class="page_nav">
--- a/templates/gitweb/filerevision.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/gitweb/filerevision.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,12 +1,14 @@
 #header#
 <title>{repo|escape}: {file|escape}@{node|short}</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#"/>
 <link rel="alternate" type="application/rss+xml"
-   href="{url}rss-log" title="RSS feed for #repo|escape#">
+   href="{url}rss-log" title="RSS feed for #repo|escape#"/>
 </head>
 <body>
 
 <div class="page_header">
-<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / file revision
+<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / file revision
 </div>
 
 <div class="page_nav">
--- a/templates/gitweb/footer.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/gitweb/footer.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,6 +1,7 @@
 <div class="page_footer">
 <div class="page_footer_text">#repo|escape#</div>
-<a class="rss_logo" href="#url#rss-log">RSS</a>
+<a class="rss_logo" href="#url#rss-log">RSS</a> 
+<a class="rss_logo" href="#url#atom-log">Atom</a>
 <br />
 #motd#
 </div>
--- a/templates/gitweb/header.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/gitweb/header.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,10 +1,8 @@
-Content-type: text/html; charset={encoding}
-
 <?xml version="1.0" encoding="{encoding}"?>
 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US" lang="en-US">
 <head>
-<link rel="icon" href="{staticurl}hgicon.png" type="image/png">
+<link rel="icon" href="{staticurl}hgicon.png" type="image/png" />
 <meta name="robots" content="index, nofollow"/>
 <link rel="stylesheet" href="{staticurl}style-gitweb.css" type="text/css" />
 
--- a/templates/gitweb/index.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/gitweb/index.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -4,7 +4,8 @@
 <body>
 
 <div class="page_header">
-<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a>Repositories list
+    <a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a>
+    Repositories list
 </div>
 
 <table cellspacing="0">
@@ -14,7 +15,8 @@
         <td><a href="?sort=#sort_contact#">Contact</a></td>
         <td><a href="?sort=#sort_lastchange#">Last change</a></td>
         <td>&nbsp;</td>
-    <tr>
+        <td>&nbsp;</td>
+    </tr>
     #entries%indexentry#
 </table>
 <div class="page_footer">
--- a/templates/gitweb/manifest.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/gitweb/manifest.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,12 +1,14 @@
 #header#
 <title>#repo|escape#: Manifest</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#"/>
 <link rel="alternate" type="application/rss+xml"
-   href="{url}rss-log" title="RSS feed for #repo|escape#">
+   href="{url}rss-log" title="RSS feed for #repo|escape#"/>
 </head>
 <body>
 
 <div class="page_header">
-<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / manifest
+<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / manifest
 </div>
 
 <div class="page_nav">
@@ -23,6 +25,7 @@
 <tr class="parity#upparity#">
 <td style="font-family:monospace">drwxr-xr-x</td>
 <td style="font-family:monospace"></td>
+<td style="font-family:monospace"></td>
 <td><a href="{url}file/#node|short##up|urlescape#{sessionvars%urlparameter}">[up]</a></td>
 <td class="link">&nbsp;</td>
 </tr>
--- a/templates/gitweb/map	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/gitweb/map	Wed Feb 06 19:57:52 2008 -0800
@@ -1,4 +1,5 @@
 default = 'summary'
+mimetype = 'text/html; charset={encoding}'
 header = header.tmpl
 footer = footer.tmpl
 search = search.tmpl
@@ -16,18 +17,18 @@
 searchentry = changelogentry.tmpl
 changeset = changeset.tmpl
 manifest = manifest.tmpl
-manifestdirentry = '<tr class="parity#parity#"><td style="font-family:monospace">drwxr-xr-x</td><td style="font-family:monospace"></td><td><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">#basename|escape#</a></td><td class="link"><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">manifest</a></td></tr>'
-manifestfileentry = '<tr class="parity#parity#"><td style="font-family:monospace">#permissions|permissions#</td><td style="font-family:monospace" align=right>#size#</td><td class="list"><a class="list" href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#basename|escape#</a></td><td class="link"><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a> | <a href="#url#log/#node|short#/#file|urlescape#{sessionvars%urlparameter}">revisions</a> | <a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a></td></tr>'
+manifestdirentry = '<tr class="parity#parity#"><td style="font-family:monospace">drwxr-xr-x</td><td style="font-family:monospace"></td><td style="font-family:monospace"></td><td><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">#basename|escape#</a></td><td class="link"><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">manifest</a></td></tr>'
+manifestfileentry = '<tr class="parity#parity#"><td style="font-family:monospace">#permissions|permissions#</td><td style="font-family:monospace" align=right>#date|isodate#</td><td style="font-family:monospace" align=right>#size#</td><td class="list"><a class="list" href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#basename|escape#</a></td><td class="link"><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a> | <a href="#url#log/#node|short#/#file|urlescape#{sessionvars%urlparameter}">revisions</a> | <a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a></td></tr>'
 filerevision = filerevision.tmpl
 fileannotate = fileannotate.tmpl
 filediff = filediff.tmpl
 filelog = filelog.tmpl
 fileline = '<div style="font-family:monospace" class="parity#parity#"><pre><span class="linenr">   #linenumber#</span> #line|escape#</pre></div>'
 annotateline = '<tr style="font-family:monospace" class="parity#parity#"><td class="linenr" style="text-align: right;"><a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#author|obfuscate#@#rev#</a></td><td><pre>#line|escape#</pre></td></tr>'
-difflineplus = '<div style="color:#008800;">#line|escape#</div>'
-difflineminus = '<div style="color:#cc0000;">#line|escape#</div>'
-difflineat = '<div style="color:#990099;">#line|escape#</div>'
-diffline = '<div>#line|escape#</div>'
+difflineplus = '<span style="color:#008800;">#line|escape#</span>'
+difflineminus = '<span style="color:#cc0000;">#line|escape#</span>'
+difflineat = '<span style="color:#990099;">#line|escape#</span>'
+diffline = '<span>#line|escape#</span>'
 changelogparent = '<tr><th class="parent">parent #rev#:</th><td class="parent"><a href="#url#rev/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
 changesetparent = '<tr><td>parent {rev}</td><td style="font-family:monospace"><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
 filerevparent = '<tr><td>parent {rev}</td><td style="font-family:monospace"><a class="list" href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a></td></tr>'
@@ -40,7 +41,7 @@
 fileannotatechild = '<tr><td>child {rev}</td><td style="font-family:monospace"><a class="list" href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
 tags = tags.tmpl
 tagentry = '<tr class="parity#parity#"><td class="age"><i>#date|age# ago</i></td><td><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}"><b>#tag|escape#</b></a></td><td class="link"><a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> | <a href="{url}log/#node|short#{sessionvars%urlparameter}">changelog</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a></td></tr>'
-branchentry = '<tr class="parity{parity}"><td class="age"><i>{date|age} ago</i></td><td><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}"><b>{node|short}</b></td><td>{branch|escape}</td><td class="link"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> | <a href="{url}log/{node|short}{sessionvars%urlparameter}">changelog</a> | <a href="{url}file/{node|short}{sessionvars%urlparameter}">manifest</a></td></tr>'
+branchentry = '<tr class="parity{parity}"><td class="age"><i>{date|age} ago</i></td><td><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}"><b>{node|short}</b></a></td><td>{branch|escape}</td><td class="link"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> | <a href="{url}log/{node|short}{sessionvars%urlparameter}">changelog</a> | <a href="{url}file/{node|short}{sessionvars%urlparameter}">manifest</a></td></tr>'
 diffblock = '<pre>#lines#</pre>'
 filediffparent = '<tr><td>parent {rev}</td><td style="font-family:monospace"><a class="list" href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
 filelogparent = '<tr><td align="right">parent #rev#:&nbsp;</td><td><a href="{url}file/{node|short}/#file|urlescape#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
@@ -52,7 +53,7 @@
 shortlogentry = '<tr class="parity#parity#"><td class="age"><i>#date|age# ago</i></td><td><i>#author|person#</i></td><td><a class="list" href="{url}rev/#node|short#{sessionvars%urlparameter}"><b>#desc|strip|firstline|escape#</b> <span class="logtags">{branches%branchtag}{tags%tagtag}</span></a></td><td class="link" nowrap><a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a></td></tr>'
 filelogentry = '<tr class="parity#parity#"><td class="age"><i>#date|age# ago</i></td><td><a class="list" href="{url}rev/#node|short#{sessionvars%urlparameter}"><b>#desc|strip|firstline|escape#</b></a></td><td class="link"><a href="{url}file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a>&nbsp;|&nbsp;<a href="{url}diff/#node|short#/#file|urlescape#{sessionvars%urlparameter}">diff</a>&nbsp;|&nbsp;<a href="{url}annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a> #rename%filelogrename#</td></tr>'
 archiveentry = ' | <a href="{url}archive/{node|short}{extension}">#type|escape#</a> '
-indexentry = '<tr class="parity#parity#"><td><a class="list" href="#url#{sessionvars%urlparameter}"><b>#name|escape#</b></a></td><td>#description#</td><td>#contact|obfuscate#</td><td class="age">#lastchange|age# ago</td><td class="indexlinks"><a class="rss_logo" href="#url#rss-log">RSS</a> #archives%archiveentry#</td></tr>'
+indexentry = '<tr class="parity#parity#"><td><a class="list" href="#url#{sessionvars%urlparameter}"><b>#name|escape#</b></a></td><td>#description#</td><td>#contact|obfuscate#</td><td class="age">#lastchange|age# ago</td><td class="indexlinks">#archives%archiveentry#</td><td><a class="rss_logo" href="#url#rss-log">RSS</a> <a class="rss_logo" href="#url#atom-log">Atom</a></td></tr>'
 index = index.tmpl
 urlparameter = '#separator##name#=#value|urlescape#'
 hiddenformentry = '<input type="hidden" name="#name#" value="#value|escape#" />'
--- a/templates/gitweb/notfound.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/gitweb/notfound.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,12 +1,11 @@
 {header}
-<title>Mercurial repositories index</title>
+<title>Mercurial repository not found</title>
 </head>
 
 <body>
 
 <div class="page_header">
-<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div
-  style="float:right;">Mercurial</div></a> Not found: {repo|escape}
+<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a> Not found: {repo|escape}
 </div>
 
 <div class="page_body">
--- a/templates/gitweb/search.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/gitweb/search.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,12 +1,14 @@
 #header#
 <title>#repo|escape#: Search</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#"/>
 <link rel="alternate" type="application/rss+xml"
-   href="{url}rss-log" title="RSS feed for #repo|escape#">
+   href="{url}rss-log" title="RSS feed for #repo|escape#"/>
 </head>
 <body>
 
 <div class="page_header">
-<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / search
+<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / search
 
 <form action="{url}log">
 {sessionvars%hiddenformentry}
--- a/templates/gitweb/shortlog.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/gitweb/shortlog.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,12 +1,14 @@
 #header#
 <title>#repo|escape#: Shortlog</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#"/>
 <link rel="alternate" type="application/rss+xml"
-   href="{url}rss-log" title="RSS feed for #repo|escape#">
+   href="{url}rss-log" title="RSS feed for #repo|escape#"/>
 </head>
 <body>
 
 <div class="page_header">
-<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / shortlog
+<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / shortlog
 </div>
 
 <form action="{url}log">
@@ -15,7 +17,6 @@
 <input type="text" name="rev"  />
 </div>
 </form>
-</div>
 <div class="page_nav">
 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
 shortlog |
--- a/templates/gitweb/summary.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/gitweb/summary.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,12 +1,14 @@
 #header#
 <title>#repo|escape#: Summary</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#"/>
 <link rel="alternate" type="application/rss+xml"
-   href="{url}rss-log" title="RSS feed for #repo|escape#">
+   href="{url}rss-log" title="RSS feed for #repo|escape#"/>
 </head>
 <body>
 
 <div class="page_header">
-<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / summary
+<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / summary
 
 <form action="{url}log">
 {sessionvars%hiddenformentry}
@@ -28,7 +30,7 @@
 <div class="title">&nbsp;</div>
 <table cellspacing="0">
 <tr><td>description</td><td>#desc#</td></tr>
-<tr><td>owner</td><td>#owner|escape#</td></tr>
+<tr><td>owner</td><td>#owner|obfuscate#</td></tr>
 <tr><td>last change</td><td>#lastchange|rfc822date#</td></tr>
 </table>
 
--- a/templates/gitweb/tags.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/gitweb/tags.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,12 +1,14 @@
 #header#
 <title>#repo|escape#: Tags</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#"/>
 <link rel="alternate" type="application/rss+xml"
-   href="{url}rss-log" title="RSS feed for #repo|escape#">
+   href="{url}rss-log" title="RSS feed for #repo|escape#"/>
 </head>
 <body>
 
 <div class="page_header">
-<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / tags
+<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / tags
 </div>
 
 <div class="page_nav">
--- a/templates/header.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/header.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,8 +1,6 @@
-Content-type: text/html; charset={encoding}
-
 <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
 <html>
 <head>
-<link rel="icon" href="#staticurl#hgicon.png" type="image/png">
+<link rel="icon" href="#staticurl#hgicon.png" type="image/png" />
 <meta name="robots" content="index, nofollow" />
 <link rel="stylesheet" href="#staticurl#style.css" type="text/css" />
--- a/templates/manifest.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/manifest.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -17,6 +17,7 @@
 <tr class="parity#upparity#">
   <td><tt>drwxr-xr-x</tt>&nbsp;
   <td>&nbsp;
+  <td>&nbsp;
   <td><a href="#url#file/#node|short##up|urlescape#{sessionvars%urlparameter}">[up]</a>
 </tr>
 #dentries%manifestdirentry#
--- a/templates/map	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/map	Wed Feb 06 19:57:52 2008 -0800
@@ -1,4 +1,5 @@
 default = 'shortlog'
+mimetype = 'text/html; charset={encoding}'
 header = header.tmpl
 footer = footer.tmpl
 search = search.tmpl
@@ -15,8 +16,8 @@
 searchentry = changelogentry.tmpl
 changeset = changeset.tmpl
 manifest = manifest.tmpl
-manifestdirentry = '<tr class="parity#parity#"><td><tt>drwxr-xr-x</tt>&nbsp;<td>&nbsp;<td><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">#basename|escape#/</a>'
-manifestfileentry = '<tr class="parity#parity#"><td><tt>#permissions|permissions#</tt>&nbsp;<td align=right><tt>#size#</tt>&nbsp;<td><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#basename|escape#</a>'
+manifestdirentry = '<tr class="parity#parity#"><td><tt>drwxr-xr-x</tt>&nbsp;<td>&nbsp;<td>&nbsp;<td><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">#basename|escape#/</a>'
+manifestfileentry = '<tr class="parity#parity#"><td><tt>#permissions|permissions#</tt>&nbsp;<td align=right><tt class="date">#date|isodate#</tt>&nbsp;<td align=right><tt>#size#</tt>&nbsp;<td><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#basename|escape#</a>'
 filerevision = filerevision.tmpl
 fileannotate = fileannotate.tmpl
 filediff = filediff.tmpl
@@ -47,7 +48,7 @@
 filelogparent = '<tr><th>parent #rev#:</th><td><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
 filediffchild = '<tr><th class="child">child #rev#:</th><td class="child"><a href="#url#rev/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
 filelogchild = '<tr><th>child #rev#:</th><td><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
-indexentry = '<tr class="parity#parity#"><td><a href="#url#{sessionvars%urlparameter}">#name|escape#</a></td><td>#description#</td><td>#contact|obfuscate#</td><td class="age">#lastchange|age# ago</td><td class="indexlinks"><a href="#url#rss-log">RSS</a> #archives%archiveentry#</td></tr>'
+indexentry = '<tr class="parity#parity#"><td><a href="#url#{sessionvars%urlparameter}">#name|escape#</a></td><td>#description#</td><td>#contact|obfuscate#</td><td class="age">#lastchange|age# ago</td><td class="indexlinks"><a href="#url#rss-log">RSS</a> <a href="#url#atom-log">Atom</a> #archives%archiveentry#</td></tr>'
 index = index.tmpl
 archiveentry = '<a href="#url#archive/#node|short##extension|urlescape#">#type|escape#</a> '
 notfound = notfound.tmpl
--- a/templates/map-cmdline.default	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/map-cmdline.default	Wed Feb 06 19:57:52 2008 -0800
@@ -1,10 +1,13 @@
 changeset = 'changeset:   {rev}:{node|short}\n{branches}{tags}{parents}user:        {author}\ndate:        {date|date}\nsummary:     {desc|firstline}\n\n'
 changeset_quiet = '{rev}:{node|short}\n'
-changeset_verbose = 'changeset:   {rev}:{node|short}\n{branches}{tags}{parents}{manifest}user:        {author}\ndate:        {date|date}\n{files}{file_adds}{file_dels}{file_copies}description:\n{desc|strip}\n\n\n'
-changeset_debug = 'changeset:   {rev}:{node}\n{branches}{tags}{parents}{manifest}user:        {author}\ndate:        {date|date}\n{files}{file_adds}{file_dels}{file_copies}{extras}description:\n{desc|strip}\n\n\n'
+changeset_verbose = 'changeset:   {rev}:{node|short}\n{branches}{tags}{parents}user:        {author}\ndate:        {date|date}\n{files}{file_copies}description:\n{desc|strip}\n\n\n'
+changeset_debug = 'changeset:   {rev}:{node}\n{branches}{tags}{parents}{manifest}user:        {author}\ndate:        {date|date}\n{file_mods}{file_adds}{file_dels}{file_copies}{extras}description:\n{desc|strip}\n\n\n'
 start_files = 'files:      '
 file = ' {file}'
 end_files = '\n'
+start_file_mods = 'files:      '
+file_mod = ' {file_mod}'
+end_file_mods = '\n'
 start_file_adds = 'files+:     '
 file_add = ' {file_add}'
 end_file_adds = '\n'
--- a/templates/notfound.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/notfound.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,9 +1,9 @@
 #header#
-<title>Mercurial repositories index</title>
+<title>Mercurial repository not found</title>
 </head>
 <body>
 
-<h2>Mercurial Repositories</h2>
+<h2>Mercurial repository not found</h2>
 
 The specified repository "#repo|escape#" is unknown, sorry.
 
--- a/templates/old/changelog.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/old/changelog.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: changelog</title>
+<link rel="alternate" type="application/atom+xml"
+   href="#url#atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="?cmd=changelog;style=rss" title="RSS feed for #repo|escape#">
 </head>
@@ -11,6 +13,7 @@
 <a href="?mf=#node|short#;path=/">manifest</a>
 #archives%archiveentry#
 <a type="application/rss+xml" href="?style=rss">rss</a>
+<a type="application/atom+xml" href="#url#atom-log" title="Atom feed for #repo|escape#">atom</a>
 </div>
 
 <h2>changelog for #repo|escape#</h2>
--- a/templates/old/filelog.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/old/filelog.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: #file|escape# history</title>
+<link rel="alternate" type="application/atom+xml"
+   href="#url#atom-log/tip/#file|urlescape#" title="Atom feed for #repo|escape#:#file#">
 <link rel="alternate" type="application/rss+xml"
    href="?fl=0;file=#file|urlescape#;style=rss" title="RSS feed for #repo|escape#:#file#">
 </head>
@@ -13,6 +15,7 @@
 <a href="?f=#node|short#;file=#file|urlescape#">file</a>
 <a href="?fa=#node|short#;file=#file|urlescape#">annotate</a>
 <a type="application/rss+xml" href="?fl=0;file=#file|urlescape#;style=rss">rss</a>
+<a type="application/atom+xml" href="#url#atom-log/tip/#file|urlescape#" title="Atom feed for #repo|escape#:#file#">atom</a>
 </div>
 
 <h2>#file|escape# revision history</h2>
--- a/templates/old/header.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/old/header.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,8 +1,6 @@
-Content-type: text/html
-
 <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
 <html>
 <head>
-<link rel="icon" href="?static=hgicon.png" type="image/png">
+<link rel="icon" href="?static=hgicon.png" type="image/png" />
 <meta name="robots" content="index, nofollow" />
 <link rel="stylesheet" href="?static=style.css" type="text/css" />
--- a/templates/old/map	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/old/map	Wed Feb 06 19:57:52 2008 -0800
@@ -1,4 +1,5 @@
 default = 'changelog'
+mimetype = 'text/html'
 header = header.tmpl
 footer = footer.tmpl
 search = search.tmpl
@@ -46,7 +47,7 @@
 filelogparent = '<tr><th>parent #rev#:</th><td><a href="?f=#node|short#;file=#file|urlescape#">#node|short#</a></td></tr>'
 filediffchild = '<tr><th class="child">child #rev#:</th><td class="child"><a href="?cs=#node|short#">#node|short#</a></td></tr>'
 filelogchild = '<tr><th>child #rev#:</th><td><a href="?f=#node|short#;file=#file|urlescape#">#node|short#</a></td></tr>'
-indexentry = '<tr class="parity#parity#"><td><a href="#url#">#name|escape#</a></td><td>#description#</td><td>#contact|obfuscate#</td><td class="age">#lastchange|age# ago</td><td class="indexlinks"><a href="#url#?cl=tip;style=rss">RSS</a> #archives%archiveentry#</td></tr>'
+indexentry = '<tr class="parity#parity#"><td><a href="#url#">#name|escape#</a></td><td>#description#</td><td>#contact|obfuscate#</td><td class="age">#lastchange|age# ago</td><td class="indexlinks"><a href="#url#?cl=tip;style=rss">RSS</a> <a href="#url#atom-log">Atom</a> #archives%archiveentry#</td></tr>'
 index = index.tmpl
 archiveentry = '<a href="#url#?ca=#node|short#;type=#type|urlescape#">#type|escape#</a> '
 notfound = notfound.tmpl
--- a/templates/old/shortlog.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/old/shortlog.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: shortlog</title>
+<link rel="alternate" type="application/atom+xml"
+   href="#url#atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="?cmd=changelog;style=rss" title="RSS feed for #repo|escape#">
 </head>
@@ -11,6 +13,7 @@
 <a href="?mf=#node|short#;path=/">manifest</a>
 #archives%archiveentry#
 <a type="application/rss+xml" href="?style=rss">rss</a>
+<a type="application/atom+xml" href="#url#atom-log" title="Atom feed for #repo|escape#">atom</a>
 </div>
 
 <h2>shortlog for #repo|escape#</h2>
--- a/templates/old/tags.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/old/tags.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: tags</title>
+<link rel="alternate" type="application/atom+xml"
+   href="#url#atom-tags" title="Atom feed for #repo|escape#: tags">
 <link rel="alternate" type="application/rss+xml"
    href="?cmd=tags;style=rss" title="RSS feed for #repo|escape#: tags">
 </head>
@@ -10,6 +12,7 @@
 <a href="?sl=tip">shortlog</a>
 <a href="?mf=#node|short#;path=/">manifest</a>
 <a type="application/rss+xml" href="?cmd=tags;style=rss">rss</a>
+<a type="application/atom+xml" href="#url#atom-tags">atom</a>
 </div>
 
 <h2>tags:</h2>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/raw/error.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,2 @@
+#header#
+error: #error#
--- a/templates/raw/header.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2 +0,0 @@
-Content-type: text/plain; charset={encoding}
-
--- a/templates/raw/map	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/raw/map	Wed Feb 06 19:57:52 2008 -0800
@@ -1,4 +1,5 @@
-header = header.tmpl
+mimetype = 'text/plain; charset={encoding}'
+header = ''
 footer = ''
 changeset = changeset.tmpl
 difflineplus = '#line#'
@@ -8,7 +9,6 @@
 changesetparent = '# Parent #node#'
 changesetchild = '# Child #node#'
 filenodelink = ''
-filerevision = '#rawfileheader##raw#'
 fileline = '#line#'
 diffblock = '#lines#'
 filediff = filediff.tmpl
@@ -18,4 +18,6 @@
 manifestdirentry = 'drwxr-xr-x {basename}\n'
 manifestfileentry = '{permissions|permissions} {size} {basename}\n'
 index = index.tmpl
+notfound = notfound.tmpl
+error = error.tmpl
 indexentry = '#url#\n'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/raw/notfound.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,2 @@
+#header#
+error: repository #repo# not found
--- a/templates/rss/header.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/rss/header.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,5 +1,3 @@
-Content-type: text/xml; charset={encoding}
-
 <?xml version="1.0" encoding="{encoding}"?>
 <rss version="2.0">
   <channel>
--- a/templates/rss/map	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/rss/map	Wed Feb 06 19:57:52 2008 -0800
@@ -1,4 +1,5 @@
 default = 'changelog'
+mimetype = 'text/xml; charset={encoding}'
 header = header.tmpl
 changelog = changelog.tmpl
 changelogentry = changelogentry.tmpl
--- a/templates/shortlog.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/shortlog.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: shortlog</title>
+<link rel="alternate" type="application/atom+xml"
+   href="#url#atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="#url#rss-log" title="RSS feed for #repo|escape#">
 </head>
@@ -11,6 +13,7 @@
 <a href="#url#file/#node|short#/{sessionvars%urlparameter}">manifest</a>
 #archives%archiveentry#
 <a type="application/rss+xml" href="#url#rss-log">rss</a>
+<a type="application/atom+xml" href="#url#atom-log" title="Atom feed for #repo|escape#">atom</a>

 </div>
 
 <h2>shortlog for #repo|escape#</h2>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/static/highlight.css	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,59 @@
+.c { color: #808080 } /* Comment */
+.err { color: #F00000; background-color: #F0A0A0 } /* Error */
+.k { color: #008000; font-weight: bold } /* Keyword */
+.o { color: #303030 } /* Operator */
+.cm { color: #808080 } /* Comment.Multiline */
+.cp { color: #507090 } /* Comment.Preproc */
+.c1 { color: #808080 } /* Comment.Single */
+.cs { color: #cc0000; font-weight: bold } /* Comment.Special */
+.gd { color: #A00000 } /* Generic.Deleted */
+.ge { font-style: italic } /* Generic.Emph */
+.gr { color: #FF0000 } /* Generic.Error */
+.gh { color: #000080; font-weight: bold } /* Generic.Heading */
+.gi { color: #00A000 } /* Generic.Inserted */
+.go { color: #808080 } /* Generic.Output */
+.gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */
+.gs { font-weight: bold } /* Generic.Strong */
+.gu { color: #800080; font-weight: bold } /* Generic.Subheading */
+.gt { color: #0040D0 } /* Generic.Traceback */
+.kc { color: #008000; font-weight: bold } /* Keyword.Constant */
+.kd { color: #008000; font-weight: bold } /* Keyword.Declaration */
+.kp { color: #003080; font-weight: bold } /* Keyword.Pseudo */
+.kr { color: #008000; font-weight: bold } /* Keyword.Reserved */
+.kt { color: #303090; font-weight: bold } /* Keyword.Type */
+.m { color: #6000E0; font-weight: bold } /* Literal.Number */
+.s { background-color: #fff0f0 } /* Literal.String */
+.na { color: #0000C0 } /* Name.Attribute */
+.nb { color: #007020 } /* Name.Builtin */
+.nc { color: #B00060; font-weight: bold } /* Name.Class */
+.no { color: #003060; font-weight: bold } /* Name.Constant */
+.nd { color: #505050; font-weight: bold } /* Name.Decorator */
+.ni { color: #800000; font-weight: bold } /* Name.Entity */
+.ne { color: #F00000; font-weight: bold } /* Name.Exception */
+.nf { color: #0060B0; font-weight: bold } /* Name.Function */
+.nl { color: #907000; font-weight: bold } /* Name.Label */
+.nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */
+.nt { color: #007000 } /* Name.Tag */
+.nv { color: #906030 } /* Name.Variable */
+.ow { color: #000000; font-weight: bold } /* Operator.Word */
+.w { color: #bbbbbb } /* Text.Whitespace */
+.mf { color: #6000E0; font-weight: bold } /* Literal.Number.Float */
+.mh { color: #005080; font-weight: bold } /* Literal.Number.Hex */
+.mi { color: #0000D0; font-weight: bold } /* Literal.Number.Integer */
+.mo { color: #4000E0; font-weight: bold } /* Literal.Number.Oct */
+.sb { background-color: #fff0f0 } /* Literal.String.Backtick */
+.sc { color: #0040D0 } /* Literal.String.Char */
+.sd { color: #D04020 } /* Literal.String.Doc */
+.s2 { background-color: #fff0f0 } /* Literal.String.Double */
+.se { color: #606060; font-weight: bold; background-color: #fff0f0 } /* Literal.String.Escape */
+.sh { background-color: #fff0f0 } /* Literal.String.Heredoc */
+.si { background-color: #e0e0e0 } /* Literal.String.Interpol */
+.sx { color: #D02000; background-color: #fff0f0 } /* Literal.String.Other */
+.sr { color: #000000; background-color: #fff0ff } /* Literal.String.Regex */
+.s1 { background-color: #fff0f0 } /* Literal.String.Single */
+.ss { color: #A06000 } /* Literal.String.Symbol */
+.bp { color: #007020 } /* Name.Builtin.Pseudo */
+.vc { color: #306090 } /* Name.Variable.Class */
+.vg { color: #d07000; font-weight: bold } /* Name.Variable.Global */
+.vi { color: #3030B0 } /* Name.Variable.Instance */
+.il { color: #0000D0; font-weight: bold } /* Literal.Number.Integer.Long */
--- a/templates/static/style-gitweb.css	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/static/style-gitweb.css	Wed Feb 06 19:57:52 2008 -0800
@@ -40,7 +40,7 @@
 div.search { margin:4px 8px; position:absolute; top:56px; right:12px }
 .linenr { color:#999999; text-decoration:none }
 a.rss_logo {
-	float:right; padding:3px 0px; width:35px; line-height:10px;
+	float:right; padding:3px 6px; line-height:10px;
 	border:1px solid; border-color:#fcc7a5 #7d3302 #3e1a01 #ff954e;
 	color:#ffffff; background-color:#ff6600;
 	font-weight:bold; font-family:sans-serif; font-size:10px;
--- a/templates/static/style.css	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/static/style.css	Wed Feb 06 19:57:52 2008 -0800
@@ -1,5 +1,6 @@
 a { text-decoration:none; }
 .age { white-space:nowrap; }
+.date { white-space:nowrap; }
 .indexlinks { white-space:nowrap; }
 .parity0 { background-color: #dddddd; }
 .parity1 { background-color: #eeeeee; }
--- a/templates/tags.tmpl	Thu Jul 26 07:56:27 2007 -0400
+++ b/templates/tags.tmpl	Wed Feb 06 19:57:52 2008 -0800
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: tags</title>
+<link rel="alternate" type="application/atom+xml"
+   href="#url#atom-tags" title="Atom feed for #repo|escape#: tags">
 <link rel="alternate" type="application/rss+xml"
    href="#url#rss-tags" title="RSS feed for #repo|escape#: tags">
 </head>
@@ -10,6 +12,7 @@
 <a href="#url#shortlog{sessionvars%urlparameter}">shortlog</a>
 <a href="#url#file/#node|short#/{sessionvars%urlparameter}">manifest</a>
 <a type="application/rss+xml" href="#url#rss-tags">rss</a>
+<a type="application/atom+xml" href="#url#atom-tags">atom</a>
 </div>
 
 <h2>tags:</h2>
--- a/tests/coverage.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/coverage.py	Wed Feb 06 19:57:52 2008 -0800
@@ -22,15 +22,20 @@
 # interface and limitations.  See [GDR 2001-12-04b] for requirements and
 # design.
 
-"""Usage:
+r"""Usage:
 
-coverage.py -x MODULE.py [ARG1 ARG2 ...]
+coverage.py -x [-p] MODULE.py [ARG1 ARG2 ...]
     Execute module, passing the given command-line arguments, collecting
-    coverage data.
+    coverage data. With the -p option, write to a temporary file containing
+    the machine name and process ID.
 
 coverage.py -e
     Erase collected coverage data.
 
+coverage.py -c
+    Collect data from multiple coverage files (as created by -p option above)
+    and store it into a single file representing the union of the coverage.
+
 coverage.py -r [-m] [-o dir1,dir2,...] FILE1 FILE2 ...
     Report on the statement coverage for the given files.  With the -m
     option, show line numbers of the statements that weren't executed.
@@ -49,16 +54,26 @@
 Coverage data is saved in the file .coverage by default.  Set the
 COVERAGE_FILE environment variable to save it somewhere else."""
 
-__version__ = "2.5.20051204"    # see detailed history at the end of this file.
+__version__ = "2.77.20070729"    # see detailed history at the end of this file.
 
 import compiler
 import compiler.visitor
+import glob
 import os
 import re
 import string
+import symbol
 import sys
 import threading
+import token
 import types
+from socket import gethostname
+
+# Python version compatibility
+try:
+    strclass = basestring   # new to 2.3
+except:
+    strclass = str
 
 # 2. IMPLEMENTATION
 #
@@ -81,6 +96,9 @@
 # names to increase speed.
 
 class StatementFindingAstVisitor(compiler.visitor.ASTVisitor):
+    """ A visitor for a parsed Abstract Syntax Tree which finds executable
+        statements.
+    """
     def __init__(self, statements, excluded, suite_spots):
         compiler.visitor.ASTVisitor.__init__(self)
         self.statements = statements
@@ -89,7 +107,6 @@
         self.excluding_suite = 0
 
     def doRecursive(self, node):
-        self.recordNodeLine(node)
         for n in node.getChildNodes():
             self.dispatch(n)
 
@@ -98,7 +115,9 @@
     def doCode(self, node):
         if hasattr(node, 'decorators') and node.decorators:
             self.dispatch(node.decorators)
-        self.doSuite(node, node.code)
+            self.recordAndDispatch(node.code)
+        else:
+            self.doSuite(node, node.code)
 
     visitFunction = visitClass = doCode
 
@@ -123,12 +142,35 @@
     def doStatement(self, node):
         self.recordLine(self.getFirstLine(node))
 
-    visitAssert = visitAssign = visitAssTuple = visitDiscard = visitPrint = \
+    visitAssert = visitAssign = visitAssTuple = visitPrint = \
         visitPrintnl = visitRaise = visitSubscript = visitDecorators = \
         doStatement
 
+    def visitPass(self, node):
+        # Pass statements have weird interactions with docstrings.  If this
+        # pass statement is part of one of those pairs, claim that the statement
+        # is on the later of the two lines.
+        l = node.lineno
+        if l:
+            lines = self.suite_spots.get(l, [l,l])
+            self.statements[lines[1]] = 1
+
+    def visitDiscard(self, node):
+        # Discard nodes are statements that execute an expression, but then
+        # discard the results.  This includes function calls, so we can't
+        # ignore them all.  But if the expression is a constant, the statement
+        # won't be "executed", so don't count it now.
+        if node.expr.__class__.__name__ != 'Const':
+            self.doStatement(node)
+
     def recordNodeLine(self, node):
-        return self.recordLine(node.lineno)
+        # Stmt nodes often have None, but shouldn't claim the first line of
+        # their children (because the first child might be an ignorable line
+        # like "global a").
+        if node.__class__.__name__ != 'Stmt':
+            return self.recordLine(self.getFirstLine(node))
+        else:
+            return 0
 
     def recordLine(self, lineno):
         # Returns a bool, whether the line is included or excluded.
@@ -137,16 +179,16 @@
             # keyword.
             if lineno in self.suite_spots:
                 lineno = self.suite_spots[lineno][0]
-            # If we're inside an exluded suite, record that this line was
+            # If we're inside an excluded suite, record that this line was
             # excluded.
             if self.excluding_suite:
                 self.excluded[lineno] = 1
                 return 0
             # If this line is excluded, or suite_spots maps this line to
             # another line that is excluded, then we're excluded.
-            elif self.excluded.has_key(lineno) or \
-                 self.suite_spots.has_key(lineno) and \
-                 self.excluded.has_key(self.suite_spots[lineno][1]):
+            elif lineno in self.excluded or \
+                 lineno in self.suite_spots and \
+                 self.suite_spots[lineno][1] in self.excluded:
                 return 0
             # Otherwise, this is an executable line.
             else:
@@ -175,8 +217,8 @@
         lastprev = self.getLastLine(prevsuite)
         firstelse = self.getFirstLine(suite)
         for l in range(lastprev+1, firstelse):
-            if self.suite_spots.has_key(l):
-                self.doSuite(None, suite, exclude=self.excluded.has_key(l))
+            if l in self.suite_spots:
+                self.doSuite(None, suite, exclude=l in self.excluded)
                 break
         else:
             self.doSuite(None, suite)
@@ -189,6 +231,8 @@
         self.doSuite(node, node.body)
         self.doElse(node.body, node)
 
+    visitWhile = visitFor
+
     def visitIf(self, node):
         # The first test has to be handled separately from the rest.
         # The first test is credited to the line with the "if", but the others
@@ -198,10 +242,6 @@
             self.doSuite(t, n)
         self.doElse(node.tests[-1][1], node)
 
-    def visitWhile(self, node):
-        self.doSuite(node, node.body)
-        self.doElse(node.body, node)
-
     def visitTryExcept(self, node):
         self.doSuite(node, node.body)
         for i in range(len(node.handlers)):
@@ -221,6 +261,9 @@
         self.doSuite(node, node.body)
         self.doPlainWordSuite(node.body, node.final)
 
+    def visitWith(self, node):
+        self.doSuite(node, node.body)
+
     def visitGlobal(self, node):
         # "global" statements don't execute like others (they don't call the
         # trace function), so don't record their line numbers.
@@ -228,9 +271,9 @@
 
 the_coverage = None
 
+class CoverageException(Exception): pass
+
 class coverage:
-    error = "coverage error"
-
     # Name of the cache file (unless environment variable is set).
     cache_default = ".coverage"
 
@@ -257,14 +300,16 @@
     def __init__(self):
         global the_coverage
         if the_coverage:
-            raise self.error, "Only one coverage object allowed."
+            raise CoverageException, "Only one coverage object allowed."
         self.usecache = 1
         self.cache = None
+        self.parallel_mode = False
         self.exclude_re = ''
         self.nesting = 0
         self.cstack = []
         self.xstack = []
-        self.relative_dir = os.path.normcase(os.path.abspath(os.curdir)+os.path.sep)
+        self.relative_dir = os.path.normcase(os.path.abspath(os.curdir)+os.sep)
+        self.exclude('# *pragma[: ]*[nN][oO] *[cC][oO][vV][eE][rR]')
 
     # t(f, x, y).  This method is passed to sys.settrace as a trace function.
     # See [van Rossum 2001-07-20b, 9.2] for an explanation of sys.settrace and
@@ -272,102 +317,120 @@
     # See [van Rossum 2001-07-20a, 3.2] for a description of frame and code
     # objects.
 
-    def t(self, f, w, a):                                   #pragma: no cover
-        #print w, f.f_code.co_filename, f.f_lineno
+    def t(self, f, w, unused):                                   #pragma: no cover
         if w == 'line':
+            #print "Executing %s @ %d" % (f.f_code.co_filename, f.f_lineno)
             self.c[(f.f_code.co_filename, f.f_lineno)] = 1
             for c in self.cstack:
                 c[(f.f_code.co_filename, f.f_lineno)] = 1
         return self.t
 
-    def help(self, error=None):
+    def help(self, error=None):     #pragma: no cover
         if error:
             print error
             print
         print __doc__
         sys.exit(1)
 
-    def command_line(self):
+    def command_line(self, argv, help_fn=None):
         import getopt
+        help_fn = help_fn or self.help
         settings = {}
         optmap = {
             '-a': 'annotate',
+            '-c': 'collect',
             '-d:': 'directory=',
             '-e': 'erase',
             '-h': 'help',
             '-i': 'ignore-errors',
             '-m': 'show-missing',
+            '-p': 'parallel-mode',
             '-r': 'report',
             '-x': 'execute',
-            '-o': 'omit=',
+            '-o:': 'omit=',
             }
         short_opts = string.join(map(lambda o: o[1:], optmap.keys()), '')
         long_opts = optmap.values()
-        options, args = getopt.getopt(sys.argv[1:], short_opts, long_opts)
+        options, args = getopt.getopt(argv, short_opts, long_opts)
         for o, a in options:
-            if optmap.has_key(o):
+            if o in optmap:
                 settings[optmap[o]] = 1
-            elif optmap.has_key(o + ':'):
+            elif o + ':' in optmap:
                 settings[optmap[o + ':']] = a
             elif o[2:] in long_opts:
                 settings[o[2:]] = 1
             elif o[2:] + '=' in long_opts:
-                settings[o[2:]] = a
-            else:
-                self.help("Unknown option: '%s'." % o)
+                settings[o[2:]+'='] = a
+            else:       #pragma: no cover
+                pass    # Can't get here, because getopt won't return anything unknown.
+
         if settings.get('help'):
-            self.help()
+            help_fn()
+
         for i in ['erase', 'execute']:
-            for j in ['annotate', 'report']:
+            for j in ['annotate', 'report', 'collect']:
                 if settings.get(i) and settings.get(j):
-                    self.help("You can't specify the '%s' and '%s' "
+                    help_fn("You can't specify the '%s' and '%s' "
                               "options at the same time." % (i, j))
+
         args_needed = (settings.get('execute')
                        or settings.get('annotate')
                        or settings.get('report'))
-        action = settings.get('erase') or args_needed
+        action = (settings.get('erase')
+                  or settings.get('collect')
+                  or args_needed)
         if not action:
-            self.help("You must specify at least one of -e, -x, -r, or -a.")
+            help_fn("You must specify at least one of -e, -x, -c, -r, or -a.")
         if not args_needed and args:
-            self.help("Unexpected arguments %s." % args)
+            help_fn("Unexpected arguments: %s" % " ".join(args))
 
+        self.parallel_mode = settings.get('parallel-mode')
         self.get_ready()
-        self.exclude('#pragma[: ]+[nN][oO] [cC][oO][vV][eE][rR]')
 
         if settings.get('erase'):
             self.erase()
         if settings.get('execute'):
             if not args:
-                self.help("Nothing to do.")
+                help_fn("Nothing to do.")
             sys.argv = args
             self.start()
             import __main__
             sys.path[0] = os.path.dirname(sys.argv[0])
             execfile(sys.argv[0], __main__.__dict__)
+        if settings.get('collect'):
+            self.collect()
         if not args:
             args = self.cexecuted.keys()
+
         ignore_errors = settings.get('ignore-errors')
         show_missing = settings.get('show-missing')
-        directory = settings.get('directory')
-        omit = filter(None, settings.get('omit', '').split(','))
-        omit += ['/<'] # Always skip /<string> etc.
+        directory = settings.get('directory=')
+
+        omit = settings.get('omit=')
+        if omit is not None:
+            omit = omit.split(',')
+        else:
+            omit = []
 
         if settings.get('report'):
             self.report(args, show_missing, ignore_errors, omit_prefixes=omit)
         if settings.get('annotate'):
             self.annotate(args, directory, ignore_errors, omit_prefixes=omit)
 
-    def use_cache(self, usecache):
+    def use_cache(self, usecache, cache_file=None):
         self.usecache = usecache
+        if cache_file and not self.cache:
+            self.cache_default = cache_file
 
-    def get_ready(self):
+    def get_ready(self, parallel_mode=False):
         if self.usecache and not self.cache:
-            self.cache = os.path.abspath(os.environ.get(self.cache_env,
-                                                        self.cache_default))
+            self.cache = os.environ.get(self.cache_env, self.cache_default)
+            if self.parallel_mode:
+                self.cache += "." + gethostname() + "." + str(os.getpid())
             self.restore()
         self.analysis_cache = {}
 
-    def start(self):
+    def start(self, parallel_mode=False):
         self.get_ready()
         if self.nesting == 0:                               #pragma: no cover
             sys.settrace(self.t)
@@ -383,12 +446,12 @@
                 threading.settrace(None)
 
     def erase(self):
+        self.get_ready()
         self.c = {}
         self.analysis_cache = {}
         self.cexecuted = {}
         if self.cache and os.path.exists(self.cache):
             os.remove(self.cache)
-        self.exclude_re = ""
 
     def exclude(self, re):
         if self.exclude_re:
@@ -406,8 +469,6 @@
     # save().  Save coverage data to the coverage cache.
 
     def save(self):
-        # move to directory that must exist.
-        os.chdir(os.sep)
         if self.usecache and self.cache:
             self.canonicalize_filenames()
             cache = open(self.cache, 'wb')
@@ -421,24 +482,52 @@
         self.c = {}
         self.cexecuted = {}
         assert self.usecache
-        if not os.path.exists(self.cache):
-            return
+        if os.path.exists(self.cache):
+            self.cexecuted = self.restore_file(self.cache)
+
+    def restore_file(self, file_name):
         try:
-            cache = open(self.cache, 'rb')
+            cache = open(file_name, 'rb')
             import marshal
             cexecuted = marshal.load(cache)
             cache.close()
             if isinstance(cexecuted, types.DictType):
-                self.cexecuted = cexecuted
+                return cexecuted
+            else:
+                return {}
         except:
-            pass
+            return {}
+
+    # collect(). Collect data in multiple files produced by parallel mode
+
+    def collect(self):
+        cache_dir, local = os.path.split(self.cache)
+        for f in os.listdir(cache_dir or '.'):
+            if not f.startswith(local):
+                continue
+
+            full_path = os.path.join(cache_dir, f)
+            cexecuted = self.restore_file(full_path)
+            self.merge_data(cexecuted)
+
+    def merge_data(self, new_data):
+        for file_name, file_data in new_data.items():
+            if file_name in self.cexecuted:
+                self.merge_file_data(self.cexecuted[file_name], file_data)
+            else:
+                self.cexecuted[file_name] = file_data
+
+    def merge_file_data(self, cache_data, new_data):
+        for line_number in new_data.keys():
+            if not line_number in cache_data:
+                cache_data[line_number] = new_data[line_number]
 
     # canonical_filename(filename).  Return a canonical filename for the
     # file (that is, an absolute path with no redundant components and
     # normalized case).  See [GDR 2001-12-04b, 3.3].
 
     def canonical_filename(self, filename):
-        if not self.canonical_filename_cache.has_key(filename):
+        if not filename in self.canonical_filename_cache:
             f = filename
             if os.path.isabs(f) and not os.path.exists(f):
                 f = os.path.basename(f)
@@ -457,8 +546,11 @@
 
     def canonicalize_filenames(self):
         for filename, lineno in self.c.keys():
+            if filename == '<string>':
+                # Can't do anything useful with exec'd strings, so skip them.
+                continue
             f = self.canonical_filename(filename)
-            if not self.cexecuted.has_key(f):
+            if not f in self.cexecuted:
                 self.cexecuted[f] = {}
             self.cexecuted[f][lineno] = 1
         self.c = {}
@@ -468,49 +560,90 @@
     def morf_filename(self, morf):
         if isinstance(morf, types.ModuleType):
             if not hasattr(morf, '__file__'):
-                raise self.error, "Module has no __file__ attribute."
-            file = morf.__file__
+                raise CoverageException, "Module has no __file__ attribute."
+            f = morf.__file__
         else:
-            file = morf
-        return self.canonical_filename(file)
+            f = morf
+        return self.canonical_filename(f)
 
     # analyze_morf(morf).  Analyze the module or filename passed as
     # the argument.  If the source code can't be found, raise an error.
     # Otherwise, return a tuple of (1) the canonical filename of the
     # source code for the module, (2) a list of lines of statements
-    # in the source code, and (3) a list of lines of excluded statements.
+    # in the source code, (3) a list of lines of excluded statements,
+    # and (4), a map of line numbers to multi-line line number ranges, for
+    # statements that cross lines.
 
     def analyze_morf(self, morf):
-        if self.analysis_cache.has_key(morf):
+        if morf in self.analysis_cache:
             return self.analysis_cache[morf]
         filename = self.morf_filename(morf)
         ext = os.path.splitext(filename)[1]
         if ext == '.pyc':
             if not os.path.exists(filename[0:-1]):
-                raise self.error, ("No source for compiled code '%s'."
+                raise CoverageException, ("No source for compiled code '%s'."
                                    % filename)
             filename = filename[0:-1]
         elif ext != '.py':
-            raise self.error, "File '%s' not Python source." % filename
+            raise CoverageException, "File '%s' not Python source." % filename
         source = open(filename, 'r')
-        lines, excluded_lines = self.find_executable_statements(
+        lines, excluded_lines, line_map = self.find_executable_statements(
             source.read(), exclude=self.exclude_re
             )
         source.close()
-        result = filename, lines, excluded_lines
+        result = filename, lines, excluded_lines, line_map
         self.analysis_cache[morf] = result
         return result
 
+    def first_line_of_tree(self, tree):
+        while True:
+            if len(tree) == 3 and type(tree[2]) == type(1):
+                return tree[2]
+            tree = tree[1]
+
+    def last_line_of_tree(self, tree):
+        while True:
+            if len(tree) == 3 and type(tree[2]) == type(1):
+                return tree[2]
+            tree = tree[-1]
+
+    def find_docstring_pass_pair(self, tree, spots):
+        for i in range(1, len(tree)):
+            if self.is_string_constant(tree[i]) and self.is_pass_stmt(tree[i+1]):
+                first_line = self.first_line_of_tree(tree[i])
+                last_line = self.last_line_of_tree(tree[i+1])
+                self.record_multiline(spots, first_line, last_line)
+
+    def is_string_constant(self, tree):
+        try:
+            return tree[0] == symbol.stmt and tree[1][1][1][0] == symbol.expr_stmt
+        except:
+            return False
+
+    def is_pass_stmt(self, tree):
+        try:
+            return tree[0] == symbol.stmt and tree[1][1][1][0] == symbol.pass_stmt
+        except:
+            return False
+
+    def record_multiline(self, spots, i, j):
+        for l in range(i, j+1):
+            spots[l] = (i, j)
+
     def get_suite_spots(self, tree, spots):
-        import symbol, token
+        """ Analyze a parse tree to find suite introducers which span a number
+            of lines.
+        """
         for i in range(1, len(tree)):
-            if isinstance(tree[i], tuple):
+            if type(tree[i]) == type(()):
                 if tree[i][0] == symbol.suite:
                     # Found a suite, look back for the colon and keyword.
                     lineno_colon = lineno_word = None
                     for j in range(i-1, 0, -1):
                         if tree[j][0] == token.COLON:
-                            lineno_colon = tree[j][2]
+                            # Colons are never executed themselves: we want the
+                            # line number of the last token before the colon.
+                            lineno_colon = self.last_line_of_tree(tree[j-1])
                         elif tree[j][0] == token.NAME:
                             if tree[j][1] == 'elif':
                                 # Find the line number of the first non-terminal
@@ -532,8 +665,18 @@
                     if lineno_colon and lineno_word:
                         # Found colon and keyword, mark all the lines
                         # between the two with the two line numbers.
-                        for l in range(lineno_word, lineno_colon+1):
-                            spots[l] = (lineno_word, lineno_colon)
+                        self.record_multiline(spots, lineno_word, lineno_colon)
+
+                    # "pass" statements are tricky: different versions of Python
+                    # treat them differently, especially in the common case of a
+                    # function with a doc string and a single pass statement.
+                    self.find_docstring_pass_pair(tree[i], spots)
+
+                elif tree[i][0] == symbol.simple_stmt:
+                    first_line = self.first_line_of_tree(tree[i])
+                    last_line = self.last_line_of_tree(tree[i])
+                    if first_line != last_line:
+                        self.record_multiline(spots, first_line, last_line)
                 self.get_suite_spots(tree[i], spots)
 
     def find_executable_statements(self, text, exclude=None):
@@ -547,9 +690,12 @@
                 if reExclude.search(lines[i]):
                     excluded[i+1] = 1
 
+        # Parse the code and analyze the parse tree to find out which statements
+        # are multiline, and where suites begin and end.
         import parser
         tree = parser.suite(text+'\n\n').totuple(1)
         self.get_suite_spots(tree, suite_spots)
+        #print "Suite spots:", suite_spots
 
         # Use the compiler module to parse the text and find the executable
         # statements.  We add newlines to be impervious to final partial lines.
@@ -562,7 +708,7 @@
         lines.sort()
         excluded_lines = excluded.keys()
         excluded_lines.sort()
-        return lines, excluded_lines
+        return lines, excluded_lines, suite_spots
 
     # format_lines(statements, lines).  Format a list of line numbers
     # for printing by coalescing groups of lines as long as the lines
@@ -595,7 +741,8 @@
                 return "%d" % start
             else:
                 return "%d-%d" % (start, end)
-        return string.join(map(stringify, pairs), ", ")
+        ret = string.join(map(stringify, pairs), ", ")
+        return ret
 
     # Backward compatibility with version 1.
     def analysis(self, morf):
@@ -603,13 +750,17 @@
         return f, s, m, mf
 
     def analysis2(self, morf):
-        filename, statements, excluded = self.analyze_morf(morf)
+        filename, statements, excluded, line_map = self.analyze_morf(morf)
         self.canonicalize_filenames()
-        if not self.cexecuted.has_key(filename):
+        if not filename in self.cexecuted:
             self.cexecuted[filename] = {}
         missing = []
         for line in statements:
-            if not self.cexecuted[filename].has_key(line):
+            lines = line_map.get(line, [line, line])
+            for l in range(lines[0], lines[1]+1):
+                if l in self.cexecuted[filename]:
+                    break
+            else:
                 missing.append(line)
         return (filename, statements, excluded, missing,
                 self.format_lines(statements, missing))
@@ -647,6 +798,15 @@
     def report(self, morfs, show_missing=1, ignore_errors=0, file=None, omit_prefixes=[]):
         if not isinstance(morfs, types.ListType):
             morfs = [morfs]
+        # On windows, the shell doesn't expand wildcards.  Do it here.
+        globbed = []
+        for morf in morfs:
+            if isinstance(morf, strclass):
+                globbed.extend(glob.glob(morf))
+            else:
+                globbed.append(morf)
+        morfs = globbed
+
         morfs = self.filter_by_prefix(morfs, omit_prefixes)
         morfs.sort(self.morf_name_compare)
 
@@ -684,8 +844,8 @@
                 raise
             except:
                 if not ignore_errors:
-                    type, msg = sys.exc_info()[0:2]
-                    print >>file, fmt_err % (name, type, msg)
+                    typ, msg = sys.exc_info()[0:2]
+                    print >>file, fmt_err % (name, typ, msg)
         if len(morfs) > 1:
             print >>file, "-" * len(header)
             if total_statements > 0:
@@ -765,18 +925,41 @@
 the_coverage = coverage()
 
 # Module functions call methods in the singleton object.
-def use_cache(*args, **kw): return the_coverage.use_cache(*args, **kw)
-def start(*args, **kw): return the_coverage.start(*args, **kw)
-def stop(*args, **kw): return the_coverage.stop(*args, **kw)
-def erase(*args, **kw): return the_coverage.erase(*args, **kw)
-def begin_recursive(*args, **kw): return the_coverage.begin_recursive(*args, **kw)
-def end_recursive(*args, **kw): return the_coverage.end_recursive(*args, **kw)
-def exclude(*args, **kw): return the_coverage.exclude(*args, **kw)
-def analysis(*args, **kw): return the_coverage.analysis(*args, **kw)
-def analysis2(*args, **kw): return the_coverage.analysis2(*args, **kw)
-def report(*args, **kw): return the_coverage.report(*args, **kw)
-def annotate(*args, **kw): return the_coverage.annotate(*args, **kw)
-def annotate_file(*args, **kw): return the_coverage.annotate_file(*args, **kw)
+def use_cache(*args, **kw):
+    return the_coverage.use_cache(*args, **kw)
+
+def start(*args, **kw):
+    return the_coverage.start(*args, **kw)
+
+def stop(*args, **kw):
+    return the_coverage.stop(*args, **kw)
+
+def erase(*args, **kw):
+    return the_coverage.erase(*args, **kw)
+
+def begin_recursive(*args, **kw):
+    return the_coverage.begin_recursive(*args, **kw)
+
+def end_recursive(*args, **kw):
+    return the_coverage.end_recursive(*args, **kw)
+
+def exclude(*args, **kw):
+    return the_coverage.exclude(*args, **kw)
+
+def analysis(*args, **kw):
+    return the_coverage.analysis(*args, **kw)
+
+def analysis2(*args, **kw):
+    return the_coverage.analysis2(*args, **kw)
+
+def report(*args, **kw):
+    return the_coverage.report(*args, **kw)
+
+def annotate(*args, **kw):
+    return the_coverage.annotate(*args, **kw)
+
+def annotate_file(*args, **kw):
+    return the_coverage.annotate_file(*args, **kw)
 
 # Save coverage data when Python exits.  (The atexit module wasn't
 # introduced until Python 2.0, so use sys.exitfunc when it's not
@@ -789,7 +972,7 @@
 
 # Command-line interface.
 if __name__ == '__main__':
-    the_coverage.command_line()
+    the_coverage.command_line(sys.argv[1:])
 
 
 # A. REFERENCES
@@ -858,10 +1041,46 @@
 # 2005-12-04 NMB Adapted Greg Rogers' patch for using relative filenames,
 # and sorting and omitting files to report on.
 #
+# 2006-07-23 NMB Applied Joseph Tate's patch for function decorators.
+#
+# 2006-08-21 NMB Applied Sigve Tjora and Mark van der Wal's fixes for argument
+# handling.
+#
+# 2006-08-22 NMB Applied Geoff Bache's parallel mode patch.
+#
+# 2006-08-23 NMB Refactorings to improve testability.  Fixes to command-line
+# logic for parallel mode and collect.
+#
+# 2006-08-25 NMB "#pragma: nocover" is excluded by default.
+#
+# 2006-09-10 NMB Properly ignore docstrings and other constant expressions that
+# appear in the middle of a function, a problem reported by Tim Leslie.
+# Minor changes to avoid lint warnings.
+#
+# 2006-09-17 NMB coverage.erase() shouldn't clobber the exclude regex.
+# Change how parallel mode is invoked, and fix erase() so that it erases the
+# cache when called programmatically.
+#
+# 2007-07-21 NMB In reports, ignore code executed from strings, since we can't
+# do anything useful with it anyway.
+# Better file handling on Linux, thanks Guillaume Chazarain.
+# Better shell support on Windows, thanks Noel O'Boyle.
+# Python 2.2 support maintained, thanks Catherine Proulx.
+#
+# 2007-07-22 NMB Python 2.5 now fully supported. The method of dealing with
+# multi-line statements is now less sensitive to the exact line that Python
+# reports during execution. Pass statements are handled specially so that their
+# disappearance during execution won't throw off the measurement.
+#
+# 2007-07-23 NMB Now Python 2.5 is *really* fully supported: the body of the
+# new with statement is counted as executable.
+#
+# 2007-07-29 NMB Better packaging.
+
 # C. COPYRIGHT AND LICENCE
 #
 # Copyright 2001 Gareth Rees.  All rights reserved.
-# Copyright 2004-2005 Ned Batchelder.  All rights reserved.
+# Copyright 2004-2007 Ned Batchelder.  All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
@@ -888,4 +1107,4 @@
 # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
 # DAMAGE.
 #
-# $Id: coverage.py 26 2005-12-04 18:42:44Z ned $
+# $Id: coverage.py 74 2007-07-29 22:28:35Z nedbat $
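
A minimal sketch of the parallel-mode workflow described in the updated usage text above: run each module with -x -p so every process writes its own data file, then merge with -c and report with -r. The test file names and the use of os.system are illustrative only, not part of the patch.

    import os

    # Each "-x -p" run writes ".coverage.<hostname>.<pid>" instead of ".coverage".
    for test in ('test_foo.py', 'test_bar.py'):
        os.system('python coverage.py -x -p %s' % test)

    # "-c" merges all matching ".coverage.*" files back into one data set.
    os.system('python coverage.py -c')

    # "-r -m" reports statement coverage and lists the lines that never ran.
    os.system('python coverage.py -r -m test_foo.py test_bar.py')
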
--- a/tests/get-with-headers.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/get-with-headers.py	Wed Feb 06 19:57:52 2008 -0800
@@ -14,3 +14,7 @@
         print "%s: %s" % (h, response.getheader(h))
 print
 sys.stdout.write(response.read())
+
+if 200 <= response.status <= 299:
+    sys.exit(0)
+sys.exit(1)
--- a/tests/hghave	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/hghave	Wed Feb 06 19:57:52 2008 -0800
@@ -1,15 +1,102 @@
 #!/usr/bin/env python
 """Test the running system for features availability. Exit with zero
-if all features are there, non-zero otherwise.
+if all features are there, non-zero otherwise. If a feature name is
+prefixed with "no-", the absence of feature is tested.
 """
 import optparse
 import os
+import re
 import sys
+import tempfile
+
+tempprefix = 'hg-hghave-'
+
+def matchoutput(cmd, regexp, ignorestatus=False):
+    """Return True if cmd executes successfully and its output
+    is matched by the supplied regular expression.
+    """
+    r = re.compile(regexp)
+    fh = os.popen(cmd)
+    s = fh.read()
+    ret = fh.close()
+    return (ignorestatus or ret is None) and r.search(s)
+
+def has_cvs():
+    return matchoutput('cvs --version 2>&1', r'Concurrent Versions System')
+
+def has_cvsps():
+    return matchoutput('cvsps -h -q 2>&1', r'cvsps version', True)
+
+def has_darcs():
+    return matchoutput('darcs', 'darcs version', True)
+
+def has_eol_in_paths():
+    try:
+        fd, path = tempfile.mkstemp(prefix=tempprefix, suffix='\n\r')
+        os.close(fd)
+        os.remove(path)
+        return True
+    except:
+        return False
+
+def has_executablebit():
+    fd, path = tempfile.mkstemp(prefix=tempprefix)
+    os.close(fd)
+    try:
+        s = os.lstat(path).st_mode
+        os.chmod(path, s | 0100)
+        return (os.lstat(path).st_mode & 0100 != 0)
+    finally:
+        os.remove(path)
+
+def has_fifo():
+    return hasattr(os, "mkfifo")
+
+def has_hotshot():
+    try:
+        # hotshot.stats tests hotshot and many problematic dependencies
+        # like profile.
+        import hotshot.stats
+        return True
+    except ImportError:
+        return False
+
+def has_lsprof():
+    try:
+        import _lsprof
+        return True
+    except ImportError:
+        return False
+
+def has_git():
+    return matchoutput('git --version 2>&1', r'^git version')
+
+def has_svn():
+    return matchoutput('svn --version 2>&1', r'^svn, version') and \
+        matchoutput('svnadmin --version 2>&1', r'^svnadmin, version')
+
+def has_svn_bindings():
+    try:
+        import svn.core
+        return True
+    except ImportError:
+        return False
 
 def has_symlink():
     return hasattr(os, "symlink")
 
 checks = {
+    "cvs": (has_cvs, "cvs client"),
+    "cvsps": (has_cvsps, "cvsps utility"),
+    "darcs": (has_darcs, "darcs client"),
+    "eol-in-paths": (has_eol_in_paths, "end-of-lines in paths"),
+    "execbit": (has_executablebit, "executable bit"),
+    "fifo": (has_fifo, "named pipes"),
+    "git": (has_git, "git command line client"),
+    "hotshot": (has_hotshot, "python hotshot module"),
+    "lsprof": (has_lsprof, "python lsprof module"),
+    "svn": (has_svn, "subversion client and admin tools"),
+    "svn-bindings": (has_svn_bindings, "subversion python bindings"),
     "symlink": (has_symlink, "symbolic links"),
 }
 
@@ -29,7 +116,7 @@
     if options.list_features:
         list_features()
         sys.exit(0)
-        
+
     quiet = options.quiet
 
     failures = 0
@@ -39,17 +126,23 @@
         if not quiet:
             sys.stderr.write(msg + '\n')
         failures += 1
-    
+
     for feature in args:
+        negate = feature.startswith('no-')
+        if negate:
+            feature = feature[3:]
+
         if feature not in checks:
-            error('hghave: unknown feature: ' + feature)
+            error('skipped: unknown feature: ' + feature)
             continue
-        
-        check, desc = checks[feature]       
-        if not check():
-            error('hghave: missing feature: ' + desc)
+
+        check, desc = checks[feature]
+        if not negate and not check():
+            error('skipped: missing feature: ' + desc)
+        elif negate and check():
+            error('skipped: system supports %s' % desc)
 
     if failures != 0:
         sys.exit(1)
 
-    
+
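
The reworded messages above follow the skip protocol that run-tests.py now understands: a test prints a line starting with "skipped: " and exits with the reserved status 80, as the test-archive-symlinks change below does with '"$TESTDIR/hghave" symlink || exit 80'. A minimal Python sketch of the same protocol, for illustration only:

    import sys

    # Emit the reason in the format extract_missing_features() looks for,
    # then exit with SKIPPED_STATUS (80) so run-tests.py counts a skip.
    print 'skipped: missing feature: symbolic links'
    sys.exit(80)
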
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/readlink.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+
+import errno, os, sys
+
+for f in sys.argv[1:]:
+    try:
+        print f, '->', os.readlink(f)
+    except OSError, err:
+        if err.errno != errno.EINVAL: raise
+        print f, 'not a symlink'
+
+sys.exit(0)
--- a/tests/run-tests.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/run-tests.py	Wed Feb 06 19:57:52 2008 -0800
@@ -19,37 +19,62 @@
 import tempfile
 import time
 
-# hghave reserved exit code to skip test
+# reserved exit code to skip test (used by hghave)
 SKIPPED_STATUS = 80
+SKIPPED_PREFIX = 'skipped: '
 
 required_tools = ["python", "diff", "grep", "unzip", "gunzip", "bunzip2", "sed"]
 
 parser = optparse.OptionParser("%prog [options] [tests]")
-parser.add_option("-v", "--verbose", action="store_true",
-    help="output verbose messages")
-parser.add_option("-t", "--timeout", type="int",
-    help="kill errant tests after TIMEOUT seconds")
-parser.add_option("-c", "--cover", action="store_true",
-    help="print a test coverage report")
-parser.add_option("-s", "--cover_stdlib", action="store_true",
-    help="print a test coverage report inc. standard libraries")
 parser.add_option("-C", "--annotate", action="store_true",
     help="output files annotated with coverage")
-parser.add_option("-r", "--retest", action="store_true",
-    help="retest failed tests")
+parser.add_option("--child", type="int",
+    help="run as child process, summary to given fd")
+parser.add_option("-c", "--cover", action="store_true",
+    help="print a test coverage report")
 parser.add_option("-f", "--first", action="store_true",
     help="exit on the first test failure")
+parser.add_option("-i", "--interactive", action="store_true",
+    help="prompt to accept changed output")
+parser.add_option("-j", "--jobs", type="int",
+    help="number of jobs to run in parallel")
 parser.add_option("-R", "--restart", action="store_true",
     help="restart at last error")
-parser.add_option("-i", "--interactive", action="store_true",
-    help="prompt to accept changed output")
+parser.add_option("-p", "--port", type="int",
+    help="port on which servers should listen")
+parser.add_option("-r", "--retest", action="store_true",
+    help="retest failed tests")
+parser.add_option("-s", "--cover_stdlib", action="store_true",
+    help="print a test coverage report inc. standard libraries")
+parser.add_option("-t", "--timeout", type="int",
+    help="kill errant tests after TIMEOUT seconds")
+parser.add_option("--tmpdir", type="string",
+    help="run tests in the given temporary directory")
+parser.add_option("-v", "--verbose", action="store_true",
+    help="output verbose messages")
+parser.add_option("--with-hg", type="string",
+    help="test existing install at given location")
 
-parser.set_defaults(timeout=180)
+parser.set_defaults(jobs=1, port=20059, timeout=180)
 (options, args) = parser.parse_args()
 verbose = options.verbose
 coverage = options.cover or options.cover_stdlib or options.annotate
 python = sys.executable
 
+if options.jobs < 1:
+    print >> sys.stderr, 'ERROR: -j/--jobs must be positive'
+    sys.exit(1)
+if options.interactive and options.jobs > 1:
+    print >> sys.stderr, 'ERROR: cannot mix --interactive and --jobs > 1'
+    sys.exit(1)
+
+def rename(src, dst):
+    """Like os.rename(), trade atomicity and opened files friendliness
+    for existing destination support.
+    """
+    shutil.copy(src, dst)
+    os.remove(src)
+
 def vlog(*msg):
     if verbose:
         for m in msg:
@@ -75,10 +100,10 @@
     '''Extract missing/unknown features log lines as a list'''
     missing = []
     for line in lines:
-        if not line.startswith('hghave: '):
+        if not line.startswith(SKIPPED_PREFIX):
             continue
         line = line.splitlines()[0]
-        missing.append(line[8:])
+        missing.append(line[len(SKIPPED_PREFIX):])
 
     return missing
 
@@ -134,10 +159,12 @@
     vlog("# Performing temporary installation of HG")
     installerrs = os.path.join("tests", "install.err")
 
-    os.chdir("..") # Get back to hg root
+    # Run installer in hg root
+    os.chdir(os.path.join(os.path.dirname(sys.argv[0]), '..'))
     cmd = ('%s setup.py clean --all'
-           ' install --force --home="%s" --install-lib="%s" >%s 2>&1'
-           % (sys.executable, INST, PYTHONDIR, installerrs))
+           ' install --force --home="%s" --install-lib="%s"'
+           ' --install-scripts="%s" >%s 2>&1'
+           % (sys.executable, INST, PYTHONDIR, BINDIR, installerrs))
     vlog("# Running", cmd)
     if os.system(cmd) == 0:
         if not verbose:
@@ -151,7 +178,14 @@
     os.chdir(TESTDIR)
 
     os.environ["PATH"] = "%s%s%s" % (BINDIR, os.pathsep, os.environ["PATH"])
-    os.environ["PYTHONPATH"] = PYTHONDIR
+
+    pydir = os.pathsep.join([PYTHONDIR, TESTDIR])
+    pythonpath = os.environ.get("PYTHONPATH")
+    if pythonpath:
+        pythonpath = pydir + os.pathsep + pythonpath
+    else:
+        pythonpath = pydir
+    os.environ["PYTHONPATH"] = pythonpath
 
     use_correct_python()
 
@@ -227,23 +261,34 @@
             ret = proc.wait()
             if ret == 0:
                 ret = signal.SIGTERM << 8
+            output += ("\n### Abort: timeout after %d seconds.\n"
+                       % options.timeout)
     return ret, splitnewlines(output)
 
-def run_one(test):
+def run_one(test, skips):
     '''tristate output:
     None -> skipped
     True -> passed
     False -> failed'''
 
+    def skip(msg):
+        if not verbose:
+            skips.append((test, msg))
+        else:
+            print "\nSkipping %s: %s" % (test, msg)
+        return None
+
     vlog("# Test", test)
-    if not verbose:
-        sys.stdout.write('.')
-        sys.stdout.flush()
 
     # create a fresh hgrc
     hgrc = file(HGRCPATH, 'w+')
     hgrc.write('[ui]\n')
     hgrc.write('slash = True\n')
+    hgrc.write('[defaults]\n')
+    hgrc.write('backout = -d "0 0"\n')
+    hgrc.write('commit = -d "0 0"\n')
+    hgrc.write('debugrawcommit = -d "0 0"\n')
+    hgrc.write('tag = -d "0 0"\n')
     hgrc.close()
 
     err = os.path.join(TESTDIR, test+".err")
@@ -271,20 +316,17 @@
     elif lctest.endswith('.bat'):
         # do not run batch scripts on non-windows
         if os.name != 'nt':
-            print '\nSkipping %s: batch script' % test
-            return None
+            return skip("batch script")
         # To reliably get the error code from batch files on WinXP,
         # the "cmd /c call" prefix is needed. Grrr
         cmd = 'cmd /c call "%s"' % testpath
     else:
         # do not run shell scripts on windows
         if os.name == 'nt':
-            print '\nSkipping %s: shell script' % test
-            return None
+            return skip("shell script")
         # do not try to run non-executable programs
         if not os.access(testpath, os.X_OK):
-            print '\nSkipping %s: not executable' % test
-            return None
+            return skip("not executable")
         cmd = '"%s"' % testpath
 
     if options.timeout > 0:
@@ -314,13 +356,17 @@
         missing = extract_missing_features(out)
         if not missing:
             missing = ['irrelevant']
-        print '\nSkipping %s: %s' % (test, missing[-1])
+        skip(missing[-1])
     elif ret:
         print "\nERROR: %s failed with error code %d" % (test, ret)
     elif diffret:
         ret = diffret
 
-    if ret != 0 and not skipped: 
+    if not verbose:
+        sys.stdout.write(skipped and 's' or '.')
+        sys.stdout.flush()
+
+    if ret != 0 and not skipped:
         # Save errors to a file for diagnosis
         f = open(err, "wb")
         for line in out:
@@ -357,40 +403,101 @@
         return None
     return ret == 0
 
+if not options.child:
+    os.umask(022)
 
-os.umask(022)
-
-check_required_tools()
+    check_required_tools()
 
 # Reset some environment variables to well-known values so that
 # the tests produce repeatable output.
 os.environ['LANG'] = os.environ['LC_ALL'] = 'C'
 os.environ['TZ'] = 'GMT'
+os.environ["EMAIL"] = "Foo Bar <foo.bar@example.com>"
 
 TESTDIR = os.environ["TESTDIR"] = os.getcwd()
-HGTMP   = os.environ["HGTMP"]   = tempfile.mkdtemp("", "hgtests.")
-DAEMON_PIDS = os.environ["DAEMON_PIDS"] = os.path.join(HGTMP, 'daemon.pids')
-HGRCPATH = os.environ["HGRCPATH"] = os.path.join(HGTMP, '.hgrc')
+HGTMP = os.environ['HGTMP'] = tempfile.mkdtemp('', 'hgtests.', options.tmpdir)
+DAEMON_PIDS = None
+HGRCPATH = None
 
 os.environ["HGEDITOR"] = sys.executable + ' -c "import sys; sys.exit(0)"'
-os.environ["HGMERGE"]  = ('python "%s" -L my -L other'
-                          % os.path.join(TESTDIR, os.path.pardir, 'contrib',
-                                         'simplemerge'))
+os.environ["HGMERGE"] = "internal:merge"
 os.environ["HGUSER"]   = "test"
 os.environ["HGENCODING"] = "ascii"
 os.environ["HGENCODINGMODE"] = "strict"
+os.environ["HGPORT"] = str(options.port)
+os.environ["HGPORT1"] = str(options.port + 1)
+os.environ["HGPORT2"] = str(options.port + 2)
 
-vlog("# Using TESTDIR", TESTDIR)
-vlog("# Using HGTMP", HGTMP)
-
-INST = os.path.join(HGTMP, "install")
+if options.with_hg:
+    INST = options.with_hg
+else:
+    INST = os.path.join(HGTMP, "install")
 BINDIR = os.path.join(INST, "bin")
 PYTHONDIR = os.path.join(INST, "lib", "python")
 COVERAGE_FILE = os.path.join(TESTDIR, ".coverage")
 
-try:
+def run_children(tests):
+    if not options.with_hg:
+        install_hg()
+
+    optcopy = dict(options.__dict__)
+    optcopy['jobs'] = 1
+    optcopy['with_hg'] = INST
+    opts = []
+    for opt, value in optcopy.iteritems():
+        name = '--' + opt.replace('_', '-')
+        if value is True:
+            opts.append(name)
+        elif value is not None:
+            opts.append(name + '=' + str(value))
+
+    tests.reverse()
+    jobs = [[] for j in xrange(options.jobs)]
+    while tests:
+        for j in xrange(options.jobs):
+            if not tests: break
+            jobs[j].append(tests.pop())
+    fps = {}
+    for j in xrange(len(jobs)):
+        job = jobs[j]
+        if not job:
+            continue
+        rfd, wfd = os.pipe()
+        childopts = ['--child=%d' % wfd, '--port=%d' % (options.port + j * 3)]
+        cmdline = [python, sys.argv[0]] + opts + childopts + job
+        vlog(' '.join(cmdline))
+        fps[os.spawnvp(os.P_NOWAIT, cmdline[0], cmdline)] = os.fdopen(rfd, 'r')
+        os.close(wfd)
+    failures = 0
+    tested, skipped, failed = 0, 0, 0
+    skips = []
+    while fps:
+        pid, status = os.wait()
+        fp = fps.pop(pid)
+        l = fp.read().splitlines()
+        test, skip, fail = map(int, l[:3])
+        for s in l[3:]:
+            skips.append(s.split(" ", 1))
+        tested += test
+        skipped += skip
+        failed += fail
+        vlog('pid %d exited, status %d' % (pid, status))
+        failures |= status
+    print
+    for s in skips:
+        print "Skipped %s: %s" % (s[0], s[1])
+    print "# Ran %d tests, %d skipped, %d failed." % (
+        tested, skipped, failed)
+    sys.exit(failures != 0)
+
+def run_tests(tests):
+    global DAEMON_PIDS, HGRCPATH
+    DAEMON_PIDS = os.environ["DAEMON_PIDS"] = os.path.join(HGTMP, 'daemon.pids')
+    HGRCPATH = os.environ["HGRCPATH"] = os.path.join(HGTMP, '.hgrc')
+
     try:
-        install_hg()
+        if not options.with_hg:
+            install_hg()
 
         if options.timeout > 0:
             try:
@@ -405,18 +512,6 @@
         failed = 0
         skipped = 0
 
-        if len(args) == 0:
-            args = os.listdir(".")
-            args.sort()
-
-
-        tests = []
-        for test in args:
-            if (test.startswith("test-") and '~' not in test and
-                ('.' not in test or test.endswith('.py') or
-                 test.endswith('.bat'))):
-                tests.append(test)
-
         if options.restart:
             orig = list(tests)
             while tests:
@@ -427,11 +522,12 @@
                 print "running all tests"
                 tests = orig
 
+        skips = []
         for test in tests:
             if options.retest and not os.path.exists(test + ".err"):
                 skipped += 1
                 continue
-            ret = run_one(test)
+            ret = run_one(test, skips)
             if ret is None:
                 skipped += 1
             elif not ret:
@@ -439,7 +535,7 @@
                     print "Accept this change? [n] ",
                     answer = sys.stdin.readline().strip()
                     if answer.lower() in "y yes".split():
-                        os.rename(test + ".err", test + ".out")
+                        rename(test + ".err", test + ".out")
                         tested += 1
                         continue
                 failed += 1
@@ -447,15 +543,46 @@
                     break
             tested += 1
 
-        print "\n# Ran %d tests, %d skipped, %d failed." % (tested, skipped,
-                                                            failed)
+        if options.child:
+            fp = os.fdopen(options.child, 'w')
+            fp.write('%d\n%d\n%d\n' % (tested, skipped, failed))
+            for s in skips:
+                fp.write("%s %s\n" % s)
+            fp.close()
+        else:
+            print
+            for s in skips:
+                print "Skipped %s: %s" % s
+            print "# Ran %d tests, %d skipped, %d failed." % (
+                tested, skipped, failed)
+
         if coverage:
             output_coverage()
     except KeyboardInterrupt:
         failed = True
         print "\ninterrupted!"
+
+    if failed:
+        sys.exit(1)
+
+if len(args) == 0:
+    args = os.listdir(".")
+    args.sort()
+
+tests = []
+for test in args:
+    if (test.startswith("test-") and '~' not in test and
+        ('.' not in test or test.endswith('.py') or
+         test.endswith('.bat'))):
+        tests.append(test)
+
+vlog("# Using TESTDIR", TESTDIR)
+vlog("# Using HGTMP", HGTMP)
+
+try:
+    if len(tests) > 1 and options.jobs > 1:
+        run_children(tests)
+    else:
+        run_tests(tests)
 finally:
     cleanup_exit()
-
-if failed:
-    sys.exit(1)
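
When --jobs is greater than one, run_children() and run_tests() above talk over a pipe: each child writes three integer lines (tested, skipped, failed) followed by one "testname reason" line per skip, and the parent sums the counters. A rough sketch of the parsing side, using made-up payload data, not part of the patch:

    # Illustrative payload a child might write to its --child file descriptor.
    payload = '5\n1\n0\ntest-foo.bat batch script\n'

    lines = payload.splitlines()
    tested, skipped, failed = map(int, lines[:3])
    skips = [l.split(' ', 1) for l in lines[3:]]
    print tested, skipped, failed, skips    # 5 1 0 [['test-foo.bat', 'batch script']]
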
--- a/tests/test-acl.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-acl.out	Wed Feb 06 19:57:52 2008 -0800
@@ -28,6 +28,7 @@
 adding foo/file.txt revisions
 adding quux/file.py revisions
 added 3 changesets with 3 changes to 3 files
+updating the branch cache
 rolling back last transaction
 0:6675d58eff77
 
@@ -59,6 +60,7 @@
 acl: acl.allow not enabled
 acl: acl.deny not enabled
 acl: changes have source "push" - skipping
+updating the branch cache
 rolling back last transaction
 0:6675d58eff77
 
@@ -94,6 +96,7 @@
 acl: allowing changeset ef1ea85a6374
 acl: allowing changeset f9cafe1212c8
 acl: allowing changeset 911600dab2ae
+updating the branch cache
 rolling back last transaction
 0:6675d58eff77
 
@@ -383,6 +386,7 @@
 acl: allowing changeset ef1ea85a6374
 acl: allowing changeset f9cafe1212c8
 acl: allowing changeset 911600dab2ae
+updating the branch cache
 rolling back last transaction
 0:6675d58eff77
 
@@ -578,6 +582,7 @@
 acl: allowing changeset ef1ea85a6374
 acl: allowing changeset f9cafe1212c8
 acl: allowing changeset 911600dab2ae
+updating the branch cache
 rolling back last transaction
 0:6675d58eff77
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-add	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,48 @@
+#!/bin/sh
+
+hg init a
+cd a
+echo a > a
+hg add -n
+hg st
+hg add
+hg st
+
+echo b > b
+hg add -n b
+hg st
+hg add b || echo "failed to add b"
+hg st
+echo % should fail
+hg add b
+hg st
+
+hg ci -m 0
+echo % should fail
+hg add a
+
+echo aa > a
+hg ci -m 1
+hg up 0
+echo aaa > a
+hg ci -m 2
+
+hg merge
+hg st
+echo % should fail
+hg add a
+hg st
+hg ci -m merge
+
+echo % issue683
+hg rm a
+hg st
+echo a > a
+hg add a
+hg st
+
+hg add c && echo "unexpected addition of missing file"
+echo c > c
+hg add d c && echo "unexpected addition of missing file"
+hg st
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-add.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,38 @@
+adding a
+? a
+adding a
+A a
+A a
+? b
+A a
+A b
+% should fail
+b already tracked!
+A a
+A b
+% should fail
+a already tracked!
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+merging a
+warning: conflicts during merge.
+merging a failed!
+0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+There are unresolved merges, you can redo the full merge using:
+  hg update -C 2
+  hg merge 1
+M a
+? a.orig
+% should fail
+a already tracked!
+M a
+? a.orig
+% issue683
+R a
+? a.orig
+M a
+? a.orig
+c does not exist!
+d does not exist!
+M a
+A c
+? a.orig
--- a/tests/test-alias	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-alias	Wed Feb 06 19:57:52 2008 -0800
@@ -1,6 +1,6 @@
 #!/bin/sh
 
-cat > $HGRCPATH <<EOF
+cat >> $HGRCPATH <<EOF
 [extensions]
 alias=
 
--- a/tests/test-archive	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-archive	Wed Feb 06 19:57:52 2008 -0800
@@ -13,16 +13,16 @@
 echo "[web]" >> .hg/hgrc
 echo "name = test-archive" >> .hg/hgrc
 echo "allow_archive = gz bz2, zip" >> .hg/hgrc
-hg serve -p 20059 -d --pid-file=hg.pid
+hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log
 cat hg.pid >> $DAEMON_PIDS
 
 TIP=`hg id -v | cut -f1 -d' '`
 QTIP=`hg id -q`
 cat > getarchive.py <<EOF
-import sys, urllib2
+import os, sys, urllib2
 node, archive = sys.argv[1:]
-f = urllib2.urlopen('http://127.0.0.1:20059/?cmd=archive;node=%s;type=%s'
-                    % (node, archive))
+f = urllib2.urlopen('http://127.0.0.1:%s/?cmd=archive;node=%s;type=%s'
+                    % (os.environ['HGPORT'], node, archive))
 sys.stdout.write(f.read())
 EOF
 http_proxy= python getarchive.py "$TIP" gz | gunzip | tar tf - | sed "s/$QTIP/TIP/"
@@ -69,8 +69,14 @@
     echo 'rev-0.tar created'
 fi
 
+hg archive -t bogus test.bogus
+
+echo % server errors
+cat errors.log
+
 echo '% empty repo'
 hg init ../empty
 cd ../empty
 hg archive ../test-empty
+
 exit 0
--- a/tests/test-archive-symlinks	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-archive-symlinks	Wed Feb 06 19:57:52 2008 -0800
@@ -1,19 +1,14 @@
 #!/bin/sh
 
+"$TESTDIR/hghave" symlink || exit 80
+
 origdir=`pwd`
 
-cat >> readlink.py <<EOF
-import os
-import sys
-
-for f in sys.argv[1:]:
-    print f, '->', os.readlink(f)
-EOF
-
 hg init repo
 cd repo
 ln -s nothing dangling
-hg ci -qAm 'add symlink'
+# avoid tar warnings about an old timestamp
+hg ci -d '2000-01-01 00:00:00 +0000' -qAm 'add symlink'
 
 hg archive -t files ../archive
 hg archive -t tar -p tar ../archive.tar
@@ -22,16 +17,16 @@
 echo '% files'
 cd "$origdir"
 cd archive
-python ../readlink.py dangling
+$TESTDIR/readlink.py dangling
 
 echo '% tar'
 cd "$origdir"
 tar xf archive.tar
 cd tar
-python ../readlink.py dangling
+$TESTDIR/readlink.py dangling
 
 echo '% zip'
 cd "$origdir"
 unzip archive.zip > /dev/null
 cd zip
-python ../readlink.py dangling
+$TESTDIR/readlink.py dangling
--- a/tests/test-archive.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-archive.out	Wed Feb 06 19:57:52 2008 -0800
@@ -39,5 +39,7 @@
 test-TIP/baz/bletch
 test-TIP/foo
 rev-0.tar created
+abort: unknown archive type 'bogus'
+% server errors
 % empty repo
 abort: repository has no revisions
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-audit-path	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,23 @@
+#!/bin/sh
+
+hg init
+
+echo % should fail
+hg add .hg/00changelog.i
+
+mkdir a
+echo a > a/a
+hg ci -Ama
+ln -s a b
+echo b > a/b
+
+echo % should fail
+hg add b/b
+
+echo % should succeed
+hg add b
+
+echo % should still fail - maybe
+hg add b/b
+
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-audit-path.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,8 @@
+% should fail
+abort: path contains illegal component: .hg/00changelog.i
+adding a/a
+% should fail
+abort: path 'b/b' traverses symbolic link 'b'
+% should succeed
+% should still fail - maybe
+abort: path 'b/b' traverses symbolic link 'b'
--- a/tests/test-backout	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-backout	Wed Feb 06 19:57:52 2008 -0800
@@ -37,6 +37,22 @@
 hg backout -d '3 0' --merge tip
 cat a 2>/dev/null || echo cat: a: No such file or directory
 
+echo '# across branch'
+cd ..
+hg init branch
+cd branch
+echo a > a
+hg ci -Am0 -d '0 0'
+echo b > b
+hg ci -Am1 -d '0 0'
+hg co -C 0
+# should fail
+hg backout -d '0 0' 1
+echo c > c
+hg ci -Am2 -d '0 0'
+# should fail
+hg backout -d '0 0' 1
+
 echo '# backout with merge'
 cd ..
 hg init merge
--- a/tests/test-backout.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-backout.out	Wed Feb 06 19:57:52 2008 -0800
@@ -15,6 +15,13 @@
 removing a
 changeset 3:7f6d0f120113 backs out changeset 2:de31bdc76c0d
 cat: a: No such file or directory
+# across branch
+adding a
+adding b
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+abort: cannot back out change on a different branch
+adding c
+abort: cannot back out change on a different branch
 # backout with merge
 adding a
 reverting a
--- a/tests/test-bad-extension.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-bad-extension.out	Wed Feb 06 19:57:52 2008 -0800
@@ -1,5 +1,4 @@
 *** failed to import extension badext: bit bucket overflow
-extension 'hgext.gpg' overrides commands: sigs sigcheck sign
 hg help [COMMAND]
 
 show help for a command, extension, or list of commands
--- a/tests/test-bad-pull	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-bad-pull	Wed Feb 06 19:57:52 2008 -0800
@@ -1,15 +1,15 @@
 #!/bin/sh
 
-hg clone http://localhost:20059/ copy
+hg clone http://localhost:$HGPORT/ copy
 echo $?
 test -d copy || echo copy: No such file or directory
 
 cat > dumb.py <<EOF
-import BaseHTTPServer, SimpleHTTPServer, signal
+import BaseHTTPServer, SimpleHTTPServer, os, signal
 
 def run(server_class=BaseHTTPServer.HTTPServer,
         handler_class=SimpleHTTPServer.SimpleHTTPRequestHandler):
-    server_address = ('localhost', 20059)
+    server_address = ('localhost', int(os.environ['HGPORT']))
     httpd = server_class(server_address, handler_class)
     httpd.serve_forever()
 
@@ -23,7 +23,7 @@
 # give the server some time to start running
 sleep 1
 
-http_proxy= hg clone http://localhost:20059/foo copy2 2>&1 | \
+http_proxy= hg clone http://localhost:$HGPORT/foo copy2 2>&1 | \
     sed -e 's/404.*/404/' -e 's/Date:.*/Date:/'
 echo $?
 
--- a/tests/test-basic.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-basic.out	Wed Feb 06 19:57:52 2008 -0800
@@ -4,7 +4,7 @@
 date:        Mon Jan 12 13:46:40 1970 +0000
 summary:     test
 
-b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 a
+b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644   a
 a
 checking changesets
 checking manifests
--- a/tests/test-bisect	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-bisect	Wed Feb 06 19:57:52 2008 -0800
@@ -2,9 +2,6 @@
 
 set -e
 
-echo "[extensions]" >> $HGRCPATH
-echo "hbisect=" >> $HGRCPATH
-
 echo % init
 hg init
 
@@ -26,11 +23,34 @@
 hg up -C
 
 echo % bisect test
-hg bisect init
-hg bisect bad
-hg bisect good 1
-hg bisect good
-hg bisect good
-hg bisect good
-hg bisect bad
-hg bisect good
+hg bisect -r
+hg bisect -b
+hg bisect -g 1
+hg bisect -g
+echo skip
+hg bisect -s
+hg bisect -g
+hg bisect -g
+hg bisect -b
+hg bisect -g
+
+echo % bisect reverse test
+hg bisect -r
+hg bisect -b null
+hg bisect -g tip
+hg bisect -g
+echo skip
+hg bisect -s
+hg bisect -g
+hg bisect -g
+hg bisect -b
+hg bisect -g
+
+hg bisect -r
+hg bisect -g tip
+hg bisect -b tip || echo error
+
+hg bisect -r
+hg bisect -g null
+hg bisect -bU tip
+hg id
\ No newline at end of file
--- a/tests/test-bisect.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-bisect.out	Wed Feb 06 19:57:52 2008 -0800
@@ -202,7 +202,10 @@
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 Testing changeset 23:5ec79163bff4 (15 changesets remaining, ~3 tests)
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-Testing changeset 27:288867a866e9 (8 changesets remaining, ~3 tests)
+skip
+Testing changeset 24:10e0acd3809e (15 changesets remaining, ~3 tests)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+Testing changeset 27:288867a866e9 (7 changesets remaining, ~2 tests)
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 Testing changeset 29:b5bd63375ab9 (4 changesets remaining, ~2 tests)
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -214,3 +217,27 @@
 date:        Thu Jan 01 00:00:29 1970 +0000
 summary:     msg 29
 
+% bisect reverse test
+Testing changeset 15:e7fa0811edb0 (32 changesets remaining, ~5 tests)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+Testing changeset 7:03750880c6b5 (16 changesets remaining, ~4 tests)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+skip
+Testing changeset 6:a3d5c6fdf0d3 (16 changesets remaining, ~4 tests)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+Testing changeset 2:db07c04beaca (7 changesets remaining, ~2 tests)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+Testing changeset 0:b99c7b9c8e11 (3 changesets remaining, ~1 tests)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+Testing changeset 1:5cd978ea5149 (2 changesets remaining, ~1 tests)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+The first good revision is:
+changeset:   1:5cd978ea5149
+user:        test
+date:        Thu Jan 01 00:00:01 1970 +0000
+summary:     msg 1
+
+abort: Inconsistent state, 31:58c80a7c8a40 is good and bad
+error
+Testing changeset 15:e7fa0811edb0 (32 changesets remaining, ~5 tests)
+5cd978ea5149
--- a/tests/test-bundle	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-bundle	Wed Feb 06 19:57:52 2008 -0800
@@ -2,6 +2,7 @@
 
 cp "$TESTDIR"/printenv.py .
 
+echo "====== Setting up test"
 hg init test
 cd test
 echo 0 > afile
@@ -30,20 +31,40 @@
 hg verify
 cd ..
 hg init empty
+
+echo "====== Bundle test to full.hg"
 hg -R test bundle full.hg empty
+echo "====== Unbundle full.hg in test"
 hg -R test unbundle full.hg
+echo "====== Verify empty"
 hg -R empty heads
 hg -R empty verify
 
+echo "====== Pull full.hg into test (using --cwd)"
 hg --cwd test pull ../full.hg
+echo "====== Pull full.hg into empty (using --cwd)"
 hg --cwd empty pull ../full.hg
+echo "====== Rollback empty"
 hg -R empty rollback
+echo "====== Pull full.hg into empty again (using --cwd)"
 hg --cwd empty pull ../full.hg
 
+echo "====== Pull full.hg into test (using -R)"
+hg -R test pull full.hg
+echo "====== Pull full.hg into empty (using -R)"
+hg -R empty pull full.hg
+echo "====== Rollback empty"
+hg -R empty rollback
+echo "====== Pull full.hg into empty again (using -R)"
+hg -R empty pull full.hg
+
+echo "====== Log -R full.hg in fresh empty"
 rm -r empty
 hg init empty
 cd empty
 hg -R bundle://../full.hg log
+
+echo "====== Pull ../full.hg into empty (with hook)"
 echo '[hooks]' >> .hg/hgrc
 echo 'changegroup = python ../printenv.py changegroup' >> .hg/hgrc
 #doesn't work (yet ?)
@@ -51,18 +72,24 @@
 hg pull bundle://../full.hg
 cd ..
 
+echo "====== Create partial clones"
 rm -r empty
 hg init empty
 hg clone -r 3 test partial
 hg clone partial partial2
 cd partial
+echo "====== Log -R full.hg in partial"
 hg -R bundle://../full.hg log
+echo "====== Incoming full.hg in partial"
 hg incoming bundle://../full.hg
+echo "====== Outgoing -R full.hg vs partial2 in partial"
 hg -R bundle://../full.hg outgoing ../partial2
+echo "====== Outgoing -R does-not-exist.hg vs partial2 in partial"
 hg -R bundle://../does-not-exist.hg outgoing ../partial2
 cd ..
 
 # test for http://www.selenic.com/mercurial/bts/issue216
+echo "====== Unbundle incremental bundles into fresh empty in one go"
 rm -r empty
 hg init empty
 hg -R test bundle --base null -r 0 ../0.hg
@@ -70,6 +97,7 @@
 hg -R empty unbundle -u ../0.hg ../1.hg
 
 # test for 540d1059c802
+echo "====== test for 540d1059c802"
 hg init orig
 cd orig
 echo foo > foo
--- a/tests/test-bundle-r.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-bundle-r.out	Wed Feb 06 19:57:52 2008 -0800
@@ -25,6 +25,7 @@
 checking files
 4 files, 9 changesets, 7 total revisions
 searching for changes
+1 changesets found
 adding changesets
 adding manifests
 adding file changes
@@ -37,6 +38,7 @@
 1 files, 1 changesets, 1 total revisions
 0:5649c9d34dd8
 searching for changes
+2 changesets found
 adding changesets
 adding manifests
 adding file changes
@@ -49,6 +51,7 @@
 1 files, 2 changesets, 2 total revisions
 1:10b2180f755b
 searching for changes
+3 changesets found
 adding changesets
 adding manifests
 adding file changes
@@ -61,6 +64,7 @@
 1 files, 3 changesets, 3 total revisions
 2:d62976ca1e50
 searching for changes
+4 changesets found
 adding changesets
 adding manifests
 adding file changes
@@ -73,6 +77,7 @@
 1 files, 4 changesets, 4 total revisions
 3:ac69c658229d
 searching for changes
+2 changesets found
 adding changesets
 adding manifests
 adding file changes
@@ -85,6 +90,7 @@
 1 files, 2 changesets, 2 total revisions
 1:5f4f3ceb285e
 searching for changes
+3 changesets found
 adding changesets
 adding manifests
 adding file changes
@@ -97,6 +103,7 @@
 1 files, 3 changesets, 3 total revisions
 2:024e4e7df376
 searching for changes
+4 changesets found
 adding changesets
 adding manifests
 adding file changes
@@ -109,6 +116,7 @@
 2 files, 4 changesets, 5 total revisions
 3:1e3f6b843bd6
 searching for changes
+5 changesets found
 adding changesets
 adding manifests
 adding file changes
@@ -121,6 +129,7 @@
 3 files, 5 changesets, 6 total revisions
 4:80fe151401c2
 searching for changes
+5 changesets found
 adding changesets
 adding manifests
 adding file changes
@@ -148,6 +157,11 @@
 % should fail
 abort: --base is incompatible with specifiying a destination
 abort: repository default-push not found!
+2 changesets found
+4 changesets found
+6 changesets found
+1 changesets found
+1 changesets found
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 % 2
 2:d62976ca1e50
@@ -202,6 +216,7 @@
  adifferentfile
 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
+7 changesets found
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 adding changesets
 adding manifests
--- a/tests/test-bundle.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-bundle.out	Wed Feb 06 19:57:52 2008 -0800
@@ -1,3 +1,4 @@
+====== Setting up test
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
 checking changesets
@@ -5,12 +6,16 @@
 crosschecking files in changesets and manifests
 checking files
 4 files, 9 changesets, 7 total revisions
+====== Bundle test to full.hg
 searching for changes
+9 changesets found
+====== Unbundle full.hg in test
 adding changesets
 adding manifests
 adding file changes
 added 0 changesets with 0 changes to 4 files
 (run 'hg update' to get a working copy)
+====== Verify empty
 changeset:   -1:000000000000
 tag:         tip
 user:        
@@ -21,9 +26,11 @@
 crosschecking files in changesets and manifests
 checking files
 0 files, 0 changesets, 0 total revisions
+====== Pull full.hg into test (using --cwd)
 pulling from ../full.hg
 searching for changes
 no changes found
+====== Pull full.hg into empty (using --cwd)
 pulling from ../full.hg
 requesting all changes
 adding changesets
@@ -31,7 +38,9 @@
 adding file changes
 added 9 changesets with 7 changes to 4 files (+1 heads)
 (run 'hg heads' to see heads, 'hg merge' to merge)
+====== Rollback empty
 rolling back last transaction
+====== Pull full.hg into empty again (using --cwd)
 pulling from ../full.hg
 requesting all changes
 adding changesets
@@ -39,6 +48,25 @@
 adding file changes
 added 9 changesets with 7 changes to 4 files (+1 heads)
 (run 'hg heads' to see heads, 'hg merge' to merge)
+====== Pull full.hg into test (using -R)
+pulling from full.hg
+searching for changes
+no changes found
+====== Pull full.hg into empty (using -R)
+pulling from full.hg
+searching for changes
+no changes found
+====== Rollback empty
+rolling back last transaction
+====== Pull full.hg into empty again (using -R)
+pulling from full.hg
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 9 changesets with 7 changes to 4 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
+====== Log -R full.hg in fresh empty
 changeset:   8:836ac62537ab
 tag:         tip
 parent:      3:ac69c658229d
@@ -87,6 +115,7 @@
 date:        Mon Jan 12 13:46:40 1970 +0000
 summary:     0.0
 
+====== Pull ../full.hg into empty (with hook)
 changegroup hook: HG_NODE=5649c9d34dd87d0ecb5fd39672128376e83b22e1 HG_SOURCE=pull HG_URL=bundle:../full.hg 
 pulling from bundle://../full.hg
 requesting all changes
@@ -95,6 +124,7 @@
 adding file changes
 added 9 changesets with 7 changes to 4 files (+1 heads)
 (run 'hg heads' to see heads, 'hg merge' to merge)
+====== Create partial clones
 requesting all changes
 adding changesets
 adding manifests
@@ -102,6 +132,7 @@
 added 4 changesets with 4 changes to 1 files
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+====== Log -R full.hg in partial
 changeset:   8:836ac62537ab
 tag:         tip
 parent:      3:ac69c658229d
@@ -150,6 +181,7 @@
 date:        Mon Jan 12 13:46:40 1970 +0000
 summary:     0.0
 
+====== Incoming full.hg in partial
 comparing with bundle://../full.hg
 searching for changes
 changeset:   4:5f4f3ceb285e
@@ -180,6 +212,7 @@
 date:        Mon Jan 12 13:46:40 1970 +0000
 summary:     0.3m
 
+====== Outgoing -R full.hg vs partial2 in partial
 comparing with ../partial2
 searching for changes
 changeset:   4:5f4f3ceb285e
@@ -210,7 +243,11 @@
 date:        Mon Jan 12 13:46:40 1970 +0000
 summary:     0.3m
 
+====== Outgoing -R does-not-exist.hg vs partial2 in partial
 abort: No such file or directory: ../does-not-exist.hg
+====== Unbundle incremental bundles into fresh empty in one go
+1 changesets found
+1 changesets found
 adding changesets
 adding manifests
 adding file changes
@@ -220,8 +257,10 @@
 adding file changes
 added 1 changesets with 1 changes to 1 files
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+====== test for 540d1059c802
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 searching for changes
+1 changesets found
 comparing with ../bundle.hg
 searching for changes
 changeset:   2:ed1b79f46b9a
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-changelog-exec	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,32 @@
+#!/bin/sh
+# b51a8138292a introduced a regression where executable files added by
+# the second parent of a merge were mentioned in the changelog.
+# Test that this no longer happens.
+
+"$TESTDIR/hghave" execbit || exit 80
+
+hg init repo
+cd repo
+echo foo > foo
+hg ci -qAm 'add foo' -d '0 0'
+
+echo bar > bar
+chmod +x bar
+hg ci -qAm 'add bar' -d '0 0'
+echo '% manifest of p2:'
+hg manifest
+echo
+
+hg up -qC 0
+echo >> foo
+hg ci -m 'change foo' -d '0 0'
+echo '% manifest of p1:'
+hg manifest
+
+hg merge
+hg ci -m 'merge' -d '0 0'
+
+echo '% this should not mention bar:'
+hg tip -v
+
+hg debugindex .hg/store/data/bar.i
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-changelog-exec.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,21 @@
+% manifest of p2:
+bar
+foo
+
+% manifest of p1:
+foo
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+% this should not mention bar:
+changeset:   3:ef2fc9b4a51b
+tag:         tip
+parent:      2:ed1b79f46b9a
+parent:      1:d394a8db219b
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+description:
+merge
+
+
+   rev    offset  length   base linkrev nodeid       p1           p2
+     0         0       5      0       1 b004912a8510 000000000000 000000000000
--- a/tests/test-clone	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-clone	Wed Feb 06 19:57:52 2008 -0800
@@ -25,3 +25,11 @@
 hg clone ../a
 cd a
 hg cat a
+
+# check that we drop the file:// from the path before
+# writing the .hgrc
+cd ../..
+hg clone file://a e
+grep 'file:' e/.hg/hgrc
+
+exit 0
--- a/tests/test-clone-failure	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-clone-failure	Wed Feb 06 19:57:52 2008 -0800
@@ -26,10 +26,15 @@
 rm -r a b
 
 # Source of wrong type
-mkfifo a
-hg clone a b
-echo $?
-rm a
+if "$TESTDIR/hghave" -q fifo; then
+    mkfifo a
+    hg clone a b
+    echo $?
+    rm a
+else
+    echo "abort: repository a not found!"
+    echo 255
+fi
 
 # Default destination, same directory
 mkdir q
--- a/tests/test-clone.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-clone.out	Wed Feb 06 19:57:52 2008 -0800
@@ -14,3 +14,4 @@
 destination directory: a
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 a
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-command-template	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-command-template	Wed Feb 06 19:57:52 2008 -0800
@@ -89,8 +89,8 @@
 cat changelog
 
 echo "# keys work"
-for key in author branches date desc file_adds file_dels files \
-        manifest node parents rev tags; do
+for key in author branches date desc file_adds file_dels file_mods \
+        files manifest node parents rev tags; do
     for mode in '' --verbose --debug; do
         hg log $mode --template "$key$mode: {$key}\n"
     done
--- a/tests/test-command-template.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-command-template.out	Wed Feb 06 19:57:52 2008 -0800
@@ -260,22 +260,22 @@
 other 3
 desc--debug: line 1
 line 2
-file_adds: 
-file_adds: 
+file_adds: second
 file_adds: 
-file_adds: 
-file_adds: 
-file_adds: 
+file_adds: d
 file_adds: 
 file_adds: 
+file_adds: c
+file_adds: b
+file_adds: a
+file_adds--verbose: second
 file_adds--verbose: 
-file_adds--verbose: 
+file_adds--verbose: d
 file_adds--verbose: 
 file_adds--verbose: 
-file_adds--verbose: 
-file_adds--verbose: 
-file_adds--verbose: 
-file_adds--verbose: 
+file_adds--verbose: c
+file_adds--verbose: b
+file_adds--verbose: a
 file_adds--debug: second
 file_adds--debug: 
 file_adds--debug: d
@@ -308,6 +308,30 @@
 file_dels--debug: 
 file_dels--debug: 
 file_dels--debug: 
+file_mods: 
+file_mods: 
+file_mods: 
+file_mods: 
+file_mods: c
+file_mods: 
+file_mods: 
+file_mods: 
+file_mods--verbose: 
+file_mods--verbose: 
+file_mods--verbose: 
+file_mods--verbose: 
+file_mods--verbose: c
+file_mods--verbose: 
+file_mods--verbose: 
+file_mods--verbose: 
+file_mods--debug: 
+file_mods--debug: 
+file_mods--debug: 
+file_mods--debug: 
+file_mods--debug: c
+file_mods--debug: 
+file_mods--debug: 
+file_mods--debug: 
 files: second
 files: 
 files: d
@@ -324,30 +348,30 @@
 files--verbose: c
 files--verbose: b
 files--verbose: a
-files--debug: 
+files--debug: second
 files--debug: 
-files--debug: 
+files--debug: d
 files--debug: 
 files--debug: c
-files--debug: 
-files--debug: 
-files--debug: 
-manifest: 
-manifest: 
-manifest: 
-manifest: 
-manifest: 
-manifest: 
-manifest: 
-manifest: 
-manifest--verbose: 
-manifest--verbose: 
-manifest--verbose: 
-manifest--verbose: 
-manifest--verbose: 
-manifest--verbose: 
-manifest--verbose: 
-manifest--verbose: 
+files--debug: c
+files--debug: b
+files--debug: a
+manifest: 7:f2dbc354b94e
+manifest: 6:91015e9dbdd7
+manifest: 5:4dc3def4f9b4
+manifest: 4:90ae8dda64e1
+manifest: 3:cb5a1327723b
+manifest: 2:6e0e82995c35
+manifest: 1:4e8d705b1e53
+manifest: 0:a0c8bcbbb45c
+manifest--verbose: 7:f2dbc354b94e
+manifest--verbose: 6:91015e9dbdd7
+manifest--verbose: 5:4dc3def4f9b4
+manifest--verbose: 4:90ae8dda64e1
+manifest--verbose: 3:cb5a1327723b
+manifest--verbose: 2:6e0e82995c35
+manifest--verbose: 1:4e8d705b1e53
+manifest--verbose: 0:a0c8bcbbb45c
 manifest--debug: 7:f2dbc354b94e5ec0b4f10680ee0cee816101d0bf
 manifest--debug: 6:91015e9dbdd76a6791085d12b0a0ec7fcd22ffbf
 manifest--debug: 5:4dc3def4f9b4c6e8de820f6ee74737f91e96a216
--- a/tests/test-commit	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-commit	Wed Feb 06 19:57:52 2008 -0800
@@ -10,6 +10,7 @@
 cd test
 echo foo > foo
 hg add foo
+HGEDITOR=true hg commit -m ""
 hg commit -d '0 0' -m commit-1
 echo foo >> foo
 hg commit -d '1 4444444' -m commit-3
--- a/tests/test-commit.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-commit.out	Wed Feb 06 19:57:52 2008 -0800
@@ -1,6 +1,9 @@
 % commit date test
 transaction abort!
 rollback completed
+abort: empty commit message
+transaction abort!
+rollback completed
 abort: impossible time zone offset: 4444444
 transaction abort!
 rollback completed
--- a/tests/test-conflict.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-conflict.out	Wed Feb 06 19:57:52 2008 -0800
@@ -1,15 +1,16 @@
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+merging a
 warning: conflicts during merge.
-merging a
 merging a failed!
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
 There are unresolved merges, you can redo the full merge using:
   hg update -C 2
   hg merge 1
 e7fe8eb3e180+0d24b7662d3e+ tip
-<<<<<<< my
+<<<<<<< local
 something else
 =======
 something
 >>>>>>> other
 M a
+? a.orig
--- a/tests/test-context.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-context.py	Wed Feb 06 19:57:52 2008 -0800
@@ -5,7 +5,6 @@
 
 repo = hg.repository(u, 'test1', create=1)
 os.chdir('test1')
-repo = hg.repository(u, '.') # FIXME: can't lock repo without doing this
 
 # create 'foo' with fixed time stamp
 f = file('foo', 'w')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,46 @@
+#!/bin/sh
+
+cat >> $HGRCPATH <<EOF
+[extensions]
+convert=
+[convert]
+hg.saverev=False
+EOF
+
+hg help convert
+
+hg init a
+cd a
+echo a > a
+hg ci -d'0 0' -Ama
+hg cp a b
+hg ci -d'1 0' -mb
+hg rm a
+hg ci -d'2 0' -mc
+hg mv b a
+hg ci -d'3 0' -md
+echo a >> a
+hg ci -d'4 0' -me
+
+cd ..
+hg convert a 2>&1 | grep -v 'subversion python bindings could not be loaded'
+hg --cwd a-hg pull ../a
+
+touch bogusfile
+echo % should fail
+hg convert a bogusfile
+
+mkdir bogusdir
+chmod 000 bogusdir
+
+echo % should fail
+hg convert a bogusdir
+
+echo % should succeed
+chmod 700 bogusdir
+hg convert a bogusdir
+
+echo % test pre and post conversion actions
+echo 'include b' > filemap
+hg convert --debug --filemap filemap a partialb | \
+    grep 'run hg'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-clonebranches	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,54 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "hgext.convert = " >> $HGRCPATH
+echo "[convert]" >> $HGRCPATH
+echo "hg.tagsbranch=0" >> $HGRCPATH
+
+hg init source
+cd source
+echo a > a
+hg ci -qAm adda
+# Add a merge with one parent in the same branch
+echo a >> a
+hg ci -qAm changea
+hg up -qC 0
+hg branch branch0
+echo b > b
+hg ci -qAm addb
+hg up -qC
+hg merge
+hg ci -qm mergeab
+hg tag -ql mergeab
+cd ..
+
+# Miss perl... sometimes
+cat > filter.py <<EOF
+import sys, re
+
+r = re.compile(r'^(?:\d+|pulling from)')
+sys.stdout.writelines([l for l in sys.stdin if r.search(l)])
+EOF
+
+echo % convert
+hg convert -v --config convert.hg.clonebranches=1 source dest |
+    python filter.py
+
+# Add a merge whose parents and child are all on different branches
+cd source
+hg branch branch1
+echo a > file1
+hg ci -qAm c1
+hg up -qC mergeab
+hg branch branch2
+echo a > file2
+hg ci -qAm c2
+hg merge branch1
+hg branch branch3
+hg ci -qAm c3
+cd ..
+
+echo % incremental conversion
+hg convert -v --config convert.hg.clonebranches=1 source dest |
+    python filter.py
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-clonebranches.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,29 @@
+marked working directory as branch branch0
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+% convert
+3 adda
+2 addb
+pulling from default into branch0
+1 changesets found
+1 changea
+0 mergeab
+pulling from default into branch0
+1 changesets found
+marked working directory as branch branch1
+marked working directory as branch branch2
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+marked working directory as branch branch3
+% incremental conversion
+2 c1
+pulling from branch0 into branch1
+2 changesets found
+1 c2
+pulling from branch0 into branch2
+2 changesets found
+0 c3
+pulling from branch2 into branch3
+3 changesets found
+pulling from branch1 into branch3
+1 changesets found
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-cvs	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,95 @@
+#!/bin/sh
+
+"$TESTDIR/hghave" cvs cvsps || exit 80
+
+cvscall()
+{
+    cvs -f $@
+}
+
+echo "[extensions]" >> $HGRCPATH
+echo "convert = " >> $HGRCPATH
+
+echo % create cvs repository
+mkdir cvsrepo
+cd cvsrepo
+export CVSROOT=`pwd`
+export CVS_OPTIONS=-f
+cd ..
+
+cvscall -q -d "$CVSROOT" init
+
+echo % create source directory
+mkdir src-temp
+cd src-temp
+echo a > a
+mkdir b
+cd b
+echo c > c
+cd ..
+
+echo % import source directory
+cvscall -q import -m import src INITIAL start
+cd ..
+
+echo % checkout source directory
+cvscall -q checkout src
+
+echo % commit a new revision changing b/c
+cd src
+echo c >> b/c
+cvscall -q commit -mci0 . | grep '<--' |\
+    sed -e 's:.*src/\(.*\),v.*:checking in src/\1,v:g'
+cd ..
+
+echo % convert fresh repo
+hg convert src src-hg | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
+cat src-hg/a
+cat src-hg/b/c
+
+echo % convert fresh repo with --filemap
+echo include b/c > filemap
+hg convert --filemap filemap src src-filemap | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
+cat src-hg/b/c
+hg -R src-filemap log --template '#rev# #desc# files: #files#\n'
+
+echo % commit new file revisions
+cd src
+echo a >> a
+echo c >> b/c
+cvscall -q commit -mci1 . | grep '<--' |\
+    sed -e 's:.*src/\(.*\),v.*:checking in src/\1,v:g'
+cd ..
+
+echo % convert again
+hg convert src src-hg | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
+cat src-hg/a
+cat src-hg/b/c
+
+echo % convert again with --filemap
+hg convert --filemap filemap src src-filemap | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
+cat src-hg/b/c
+hg -R src-filemap log --template '#rev# #desc# files: #files#\n'
+
+echo % commit branch
+cd src
+cvs -q update -r1.1 b/c
+cvs -q tag -b branch
+cvs -q update -r branch
+echo d >> b/c
+cvs -q commit -mci2 . | grep '<--' |\
+    sed -e 's:.*src/\(.*\),v.*:checking in src/\1,v:g'
+cd ..
+
+echo % convert again
+hg convert src src-hg | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
+cat src-hg/a
+cat src-hg/b/c
+
+echo % convert again with --filemap
+hg convert --filemap filemap src src-filemap | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
+cat src-hg/b/c
+hg -R src-filemap log --template '#rev# #desc# files: #files#\n'
+
+echo "graphlog = " >> $HGRCPATH
+hg -R src-hg glog --template '#rev# (#branches#) #desc# files: #files#\n'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-cvs.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,109 @@
+% create cvs repository
+% create source directory
+% import source directory
+N src/a
+N src/b/c
+
+No conflicts created by this import
+
+% checkout source directory
+U src/a
+U src/b/c
+% commit a new revision changing b/c
+checking in src/b/c,v
+% convert fresh repo
+initializing destination src-hg repository
+connecting to cvsrepo
+scanning source...
+sorting...
+converting...
+2 Initial revision
+1 import
+0 ci0
+updating tags
+a
+c
+c
+% convert fresh repo with --filemap
+initializing destination src-filemap repository
+connecting to cvsrepo
+scanning source...
+sorting...
+converting...
+2 Initial revision
+1 import
+rolling back last transaction
+0 ci0
+updating tags
+c
+c
+2 update tags files: .hgtags
+1 ci0 files: b/c
+0 Initial revision files: b/c
+% commit new file revisions
+checking in src/a,v
+checking in src/b/c,v
+% convert again
+connecting to cvsrepo
+scanning source...
+sorting...
+converting...
+0 ci1
+a
+a
+c
+c
+c
+% convert again with --filemap
+connecting to cvsrepo
+scanning source...
+sorting...
+converting...
+0 ci1
+c
+c
+c
+3 ci1 files: b/c
+2 update tags files: .hgtags
+1 ci0 files: b/c
+0 Initial revision files: b/c
+% commit branch
+U b/c
+T a
+T b/c
+checking in src/b/c,v
+% convert again
+connecting to cvsrepo
+scanning source...
+sorting...
+converting...
+0 ci2
+a
+a
+c
+d
+% convert again with --filemap
+connecting to cvsrepo
+scanning source...
+sorting...
+converting...
+0 ci2
+c
+d
+4 ci2 files: b/c
+3 ci1 files: b/c
+2 update tags files: .hgtags
+1 ci0 files: b/c
+0 Initial revision files: b/c
+o  5 (branch) ci2 files: b/c
+|
+| o  4 () ci1 files: a b/c
+| |
+| o  3 () update tags files: .hgtags
+| |
+| o  2 () ci0 files: b/c
+|/
+| o  1 (INITIAL) import files:
+|/
+o  0 () Initial revision files: a b/c
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-darcs	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,61 @@
+#!/bin/sh
+
+"$TESTDIR/hghave" darcs || exit 80
+
+echo "[extensions]" >> $HGRCPATH
+echo "convert=" >> $HGRCPATH
+echo 'hgext.graphlog =' >> $HGRCPATH
+
+DARCS_EMAIL='test@example.org'; export DARCS_EMAIL
+HOME=do_not_use_HOME_darcs; export HOME
+
+# skip if we can't import elementtree
+mkdir dummy
+mkdir dummy/_darcs
+if hg convert dummy 2>&1 | grep ElementTree > /dev/null; then
+    echo 'skipped: missing feature: elementtree module'
+    exit 80
+fi
+
+echo % initialize darcs repo
+mkdir darcs-repo
+cd darcs-repo
+darcs init
+echo a > a
+darcs record -a -l -m p0
+cd ..
+
+echo % branch and update
+darcs get darcs-repo darcs-clone >/dev/null
+cd darcs-clone
+echo c >> a
+echo c > c
+darcs record -a -l -m p1.1
+cd ..
+
+echo % update source
+cd darcs-repo
+echo b >> a
+echo b > b
+darcs record -a -l -m p1.2
+
+echo % merge branch
+darcs pull -a ../darcs-clone
+echo e > a
+darcs record -a -l -m p2
+cd ..
+
+glog()
+{
+    hg glog --template '#rev# "#desc|firstline#" files: #files#\n' "$@"
+}
+
+hg convert darcs-repo darcs-repo-hg 2>&1 | grep -v hGetLine | grep -v '^$'
+# The converter does not currently handle patch conflicts very well.
+# When they occur, it reverts *all* changes and moves forward,
+# letting the conflict-resolving patch fix the collisions.
+# Unfortunately, non-conflicting changes, such as the addition of the
+# "c" file in the p1.1 patch, are reverted too.
+# In short, the manifest not listing "c" here is a bug.
+glog -R darcs-repo-hg
+hg -R darcs-repo-hg manifest --debug
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-darcs.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,29 @@
+% initialize darcs repo
+Finished recording patch 'p0'
+% branch and update
+Finished recording patch 'p1.1'
+% update source
+Finished recording patch 'p1.2'
+% merge branch
+We have conflicts in the following files:
+./a
+Finished pulling and applying.
+Finished recording patch 'p2'
+initializing destination darcs-repo-hg repository
+scanning source...
+sorting...
+converting...
+3 p0
+2 p1.2
+1 p1.1
+0 p2
+o  3 "p2" files: a
+|
+o  2 "p1.1" files:
+|
+o  1 "p1.2" files: a b
+|
+o  0 "p0" files: a
+
+7225b30cdf38257d5cc7780772c051b6f33e6d6b 644   a
+1e88685f5ddec574a34c70af492f95b6debc8741 644   b
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-filemap	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,124 @@
+#!/bin/sh
+
+HGMERGE=true; export HGMERGE
+
+echo '[extensions]' >> $HGRCPATH
+echo 'hgext.graphlog =' >> $HGRCPATH
+echo 'hgext.convert =' >> $HGRCPATH
+
+glog()
+{
+    hg glog --template '#rev# "#desc#" files: #files#\n' "$@"
+}
+
+hg init source
+cd source
+
+echo foo > foo
+echo baz > baz
+mkdir dir
+echo dir/file >> dir/file
+echo dir/file2 >> dir/file2
+hg ci -d '0 0' -qAm '0: add foo baz dir/'
+
+echo bar > bar
+echo quux > quux
+hg copy foo copied
+hg ci -d '1 0' -qAm '1: add bar quux; copy foo to copied'
+
+echo >> foo
+hg ci -d '2 0' -m '2: change foo'
+
+hg up -qC 1
+echo >> bar
+echo >> quux
+hg ci -d '3 0' -m '3: change bar quux'
+
+hg up -qC 2
+hg merge -qr 3
+echo >> bar
+echo >> baz
+hg ci -d '4 0' -m '4: first merge; change bar baz'
+
+echo >> bar
+echo 1 >> baz
+echo >> quux
+hg ci -d '5 0' -m '5: change bar baz quux'
+
+hg up -qC 4
+echo >> foo
+echo 2 >> baz
+hg ci -d '6 0' -m '6: change foo baz'
+
+hg up -qC 5
+hg merge -qr 6
+echo >> bar
+hg ci -d '7 0' -m '7: second merge; change bar'
+
+echo >> foo
+hg ci -m '8: change foo'
+
+glog
+
+echo '% final file versions in this repo:'
+hg manifest --debug
+hg debugrename copied
+echo
+
+cd ..
+
+splitrepo()
+{
+    msg="$1"
+    files="$2"
+    opts=$3
+    echo "% $files: $msg"
+    prefix=`echo "$files" | sed -e 's/ /-/g'`
+    fmap="$prefix.fmap"
+    repo="$prefix.repo"
+    for i in $files; do
+	echo "include $i" >> "$fmap"
+    done
+    hg -q convert $opts --filemap "$fmap" --datesort source "$repo"
+    glog -R "$repo"
+    hg -R "$repo" manifest --debug
+}
+
+splitrepo 'skip unwanted merges; use 1st parent in 1st merge, 2nd in 2nd' foo
+
+splitrepo 'merges are not merges anymore' bar
+
+splitrepo '1st merge is not a merge anymore; 2nd still is' baz
+
+splitrepo 'we add additional merges when they are interesting' 'foo quux'
+
+splitrepo 'partial conversion' 'bar quux' '-r 3'
+splitrepo 'complete the partial conversion' 'bar quux'
+
+rm -r foo.repo
+splitrepo 'partial conversion' 'foo' '-r 3'
+splitrepo 'complete the partial conversion' 'foo'
+
+splitrepo 'copied file; source not included in new repo' copied
+hg --cwd copied.repo debugrename copied
+
+splitrepo 'copied file; source included in new repo' 'foo copied'
+hg --cwd foo-copied.repo debugrename copied
+
+cat > renames.fmap <<EOF
+include dir
+exclude dir/file2
+rename dir dir2
+include foo
+include copied
+rename foo foo2
+rename copied copied2
+EOF
+hg -q convert --filemap renames.fmap --datesort source renames.repo
+glog -R renames.repo
+hg -R renames.repo manifest --debug
+hg --cwd renames.repo debugrename copied2
+echo 'copied:'
+hg --cwd source cat copied
+echo 'copied2:'
+hg --cwd renames.repo cat copied2
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-filemap.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,154 @@
+@  8 "8: change foo" files: foo
+|
+o    7 "7: second merge; change bar" files: bar baz
+|\
+| o  6 "6: change foo baz" files: baz foo
+| |
+o |  5 "5: change bar baz quux" files: bar baz quux
+|/
+o    4 "4: first merge; change bar baz" files: bar baz
+|\
+| o  3 "3: change bar quux" files: bar quux
+| |
+o |  2 "2: change foo" files: foo
+|/
+o  1 "1: add bar quux; copy foo to copied" files: bar copied quux
+|
+o  0 "0: add foo baz dir/" files: baz dir/file dir/file2 foo
+
+% final file versions in this repo:
+9463f52fe115e377cf2878d4fc548117211063f2 644   bar
+94c1be4dfde2ee8d78db8bbfcf81210813307c3d 644   baz
+6ca237634e1f6bee1b6db94292fb44f092a25842 644   copied
+3e20847584beff41d7cd16136b7331ab3d754be0 644   dir/file
+75e6d3f8328f5f6ace6bf10b98df793416a09dca 644   dir/file2
+9a7b52012991e4873687192c3e17e61ba3e837a3 644   foo
+bc3eca3f47023a3e70ca0d8cc95a22a6827db19d 644   quux
+copied renamed from foo:2ed2a3912a0b24502043eae84ee4b279c18b90dd
+
+% foo: skip unwanted merges; use 1st parent in 1st merge, 2nd in 2nd
+o  3 "8: change foo" files: foo
+|
+o  2 "6: change foo baz" files: foo
+|
+o  1 "2: change foo" files: foo
+|
+o  0 "0: add foo baz dir/" files: foo
+
+9a7b52012991e4873687192c3e17e61ba3e837a3 644   foo
+% bar: merges are not merges anymore
+o  4 "7: second merge; change bar" files: bar
+|
+o  3 "5: change bar baz quux" files: bar
+|
+o  2 "4: first merge; change bar baz" files: bar
+|
+o  1 "3: change bar quux" files: bar
+|
+o  0 "1: add bar quux; copy foo to copied" files: bar
+
+9463f52fe115e377cf2878d4fc548117211063f2 644   bar
+% baz: 1st merge is not a merge anymore; 2nd still is
+o    4 "7: second merge; change bar" files: baz
+|\
+| o  3 "6: change foo baz" files: baz
+| |
+o |  2 "5: change bar baz quux" files: baz
+|/
+o  1 "4: first merge; change bar baz" files: baz
+|
+o  0 "0: add foo baz dir/" files: baz
+
+94c1be4dfde2ee8d78db8bbfcf81210813307c3d 644   baz
+% foo quux: we add additional merges when they are interesting
+o  8 "8: change foo" files: foo
+|
+o    7 "7: second merge; change bar" files:
+|\
+| o  6 "6: change foo baz" files: foo
+| |
+o |  5 "5: change bar baz quux" files: quux
+|/
+o    4 "4: first merge; change bar baz" files:
+|\
+| o  3 "3: change bar quux" files: quux
+| |
+o |  2 "2: change foo" files: foo
+|/
+o  1 "1: add bar quux; copy foo to copied" files: quux
+|
+o  0 "0: add foo baz dir/" files: foo
+
+9a7b52012991e4873687192c3e17e61ba3e837a3 644   foo
+bc3eca3f47023a3e70ca0d8cc95a22a6827db19d 644   quux
+% bar quux: partial conversion
+o  1 "3: change bar quux" files: bar quux
+|
+o  0 "1: add bar quux; copy foo to copied" files: bar quux
+
+b79105bedc55102f394e90a789c9c380117c1b4a 644   bar
+db0421cc6b685a458c8d86c7d5c004f94429ea23 644   quux
+% bar quux: complete the partial conversion
+o  4 "7: second merge; change bar" files: bar
+|
+o  3 "5: change bar baz quux" files: bar quux
+|
+o  2 "4: first merge; change bar baz" files: bar
+|
+o  1 "3: change bar quux" files: bar quux
+|
+o  0 "1: add bar quux; copy foo to copied" files: bar quux
+
+9463f52fe115e377cf2878d4fc548117211063f2 644   bar
+bc3eca3f47023a3e70ca0d8cc95a22a6827db19d 644   quux
+% foo: partial conversion
+o  0 "0: add foo baz dir/" files: foo
+
+2ed2a3912a0b24502043eae84ee4b279c18b90dd 644   foo
+% foo: complete the partial conversion
+o  3 "8: change foo" files: foo
+|
+o  2 "6: change foo baz" files: foo
+|
+o  1 "2: change foo" files: foo
+|
+o  0 "0: add foo baz dir/" files: foo
+
+9a7b52012991e4873687192c3e17e61ba3e837a3 644   foo
+% copied: copied file; source not included in new repo
+o  0 "1: add bar quux; copy foo to copied" files: copied
+
+2ed2a3912a0b24502043eae84ee4b279c18b90dd 644   copied
+copied not renamed
+% foo copied: copied file; source included in new repo
+o  4 "8: change foo" files: foo
+|
+o  3 "6: change foo baz" files: foo
+|
+o  2 "2: change foo" files: foo
+|
+o  1 "1: add bar quux; copy foo to copied" files: copied
+|
+o  0 "0: add foo baz dir/" files: foo
+
+6ca237634e1f6bee1b6db94292fb44f092a25842 644   copied
+9a7b52012991e4873687192c3e17e61ba3e837a3 644   foo
+copied renamed from foo:2ed2a3912a0b24502043eae84ee4b279c18b90dd
+o  4 "8: change foo" files: foo2
+|
+o  3 "6: change foo baz" files: foo2
+|
+o  2 "2: change foo" files: foo2
+|
+o  1 "1: add bar quux; copy foo to copied" files: copied2
+|
+o  0 "0: add foo baz dir/" files: dir2/file foo2
+
+e5e3d520be9be45937d0b06b004fadcd6c221fa2 644   copied2
+3e20847584beff41d7cd16136b7331ab3d754be0 644   dir2/file
+9a7b52012991e4873687192c3e17e61ba3e837a3 644   foo2
+copied2 renamed from foo2:2ed2a3912a0b24502043eae84ee4b279c18b90dd
+copied:
+foo
+copied2:
+foo
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-git	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,132 @@
+#!/bin/sh
+
+"$TESTDIR/hghave" git || exit 80
+
+echo "[extensions]" >> $HGRCPATH
+echo "convert=" >> $HGRCPATH
+echo 'hgext.graphlog =' >> $HGRCPATH
+
+GIT_AUTHOR_NAME='test'; export GIT_AUTHOR_NAME
+GIT_AUTHOR_EMAIL='test@example.org'; export GIT_AUTHOR_EMAIL
+GIT_AUTHOR_DATE="2007-01-01 00:00:00 +0000"; export GIT_AUTHOR_DATE
+GIT_COMMITTER_NAME="$GIT_AUTHOR_NAME"; export GIT_COMMITTER_NAME
+GIT_COMMITTER_EMAIL="$GIT_AUTHOR_EMAIL"; export GIT_COMMITTER_EMAIL
+GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"; export GIT_COMMITTER_DATE
+
+count=10
+commit()
+{
+    GIT_AUTHOR_DATE="2007-01-01 00:00:$count +0000"
+    GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"
+    git commit "$@" >/dev/null 2>/dev/null || echo "git commit error"
+    count=`expr $count + 1`
+}
+
+mkdir git-repo
+cd git-repo
+git init-db >/dev/null 2>/dev/null
+echo a > a
+mkdir d
+echo b > d/b
+git add a d
+commit -a -m t1
+
+# Remove the directory, then try to replace it with a file
+# (issue 754)
+git rm -f d/b
+commit -m t2
+echo d > d
+git add d
+commit -m t3
+
+echo b >> a
+commit -a -m t4.1
+
+git checkout -b other HEAD^ >/dev/null 2>/dev/null
+echo c > a
+echo a >> a
+commit -a -m t4.2
+
+git checkout master >/dev/null 2>/dev/null
+git pull --no-commit . other > /dev/null 2>/dev/null
+commit -m 'Merge branch other'
+cd ..
+
+hg convert --datesort git-repo
+
+hg -R git-repo-hg tip -v
+
+count=10
+mkdir git-repo2
+cd git-repo2
+git init-db >/dev/null 2>/dev/null
+
+echo foo > foo
+git add foo
+commit -a -m 'add foo'
+
+echo >> foo
+commit -a -m 'change foo'
+
+git checkout -b Bar HEAD^ >/dev/null 2>/dev/null
+echo quux >> quux
+git add quux
+commit -a -m 'add quux'
+
+echo bar > bar
+git add bar
+commit -a -m 'add bar'
+
+git checkout -b Baz HEAD^ >/dev/null 2>/dev/null
+echo baz > baz
+git add baz
+commit -a -m 'add baz'
+
+git checkout master >/dev/null 2>/dev/null
+git pull --no-commit . Bar Baz > /dev/null 2>/dev/null
+commit -m 'Octopus merge'
+
+echo bar >> bar
+commit -a -m 'change bar'
+
+git checkout -b Foo HEAD^ >/dev/null 2>/dev/null
+echo >> foo
+commit -a -m 'change foo'
+
+git checkout master >/dev/null 2>/dev/null
+git pull --no-commit -s ours . Foo > /dev/null 2>/dev/null
+commit -m 'Discard change to foo'
+
+cd ..
+
+glog()
+{
+    hg glog --template '#rev# "#desc|firstline#" files: #files#\n' "$@"
+}
+
+splitrepo()
+{
+    msg="$1"
+    files="$2"
+    opts=$3
+    echo "% $files: $msg"
+    prefix=`echo "$files" | sed -e 's/ /-/g'`
+    fmap="$prefix.fmap"
+    repo="$prefix.repo"
+    for i in $files; do
+	echo "include $i" >> "$fmap"
+    done
+    hg -q convert $opts --filemap "$fmap" --datesort git-repo2 "$repo"
+    glog -R "$repo"
+    hg -R "$repo" manifest --debug
+}
+
+echo '% full conversion'
+hg -q convert --datesort git-repo2 fullrepo
+glog -R fullrepo
+hg -R fullrepo manifest --debug
+
+splitrepo 'octopus merge' 'foo bar baz'
+
+splitrepo 'only some parents of an octopus merge; "discard" a head' 'foo baz quux'
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-git.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,90 @@
+rm 'd/b'
+assuming destination git-repo-hg
+initializing destination git-repo-hg repository
+scanning source...
+sorting...
+converting...
+5 t1
+4 t2
+3 t3
+2 t4.1
+1 t4.2
+0 Merge branch other
+changeset:   5:c6d72c98aa00
+tag:         tip
+parent:      3:a18bdfccf429
+parent:      4:48cb5b72ce56
+user:        test <test@example.org>
+date:        Mon Jan 01 00:00:15 2007 +0000
+files:       a
+description:
+Merge branch other
+
+committer: test <test@example.org>
+
+
+% full conversion
+o    9 "Discard change to foo" files: foo
+|\
+| o  8 "change foo" files: foo
+| |
+o |  7 "change bar" files: bar
+|/
+o    6 "(octopus merge fixup)" files:
+|\
+| o    5 "Octopus merge" files: baz
+| |\
+o | |  4 "add baz" files: baz
+| | |
++---o  3 "add bar" files: bar
+| |
+o |  2 "add quux" files: quux
+| |
+| o  1 "change foo" files: foo
+|/
+o  0 "add foo" files: foo
+
+245a3b8bc653999c2b22cdabd517ccb47aecafdf 644   bar
+354ae8da6e890359ef49ade27b68bbc361f3ca88 644   baz
+9277c9cc8dd4576fc01a17939b4351e5ada93466 644   foo
+88dfeab657e8cf2cef3dec67b914f49791ae76b1 644   quux
+% foo bar baz: octopus merge
+o    8 "Discard change to foo" files: foo
+|\
+| o  7 "change foo" files: foo
+| |
+o |  6 "change bar" files: bar
+|/
+o    5 "(octopus merge fixup)" files:
+|\
+| o    4 "Octopus merge" files: baz
+| |\
+o | |  3 "add baz" files: baz
+| | |
++---o  2 "add bar" files: bar
+| |
+| o  1 "change foo" files: foo
+|/
+o  0 "add foo" files: foo
+
+245a3b8bc653999c2b22cdabd517ccb47aecafdf 644   bar
+354ae8da6e890359ef49ade27b68bbc361f3ca88 644   baz
+9277c9cc8dd4576fc01a17939b4351e5ada93466 644   foo
+% foo baz quux: only some parents of an octopus merge; "discard" a head
+o  6 "Discard change to foo" files: foo
+|
+o  5 "change foo" files: foo
+|
+o    4 "Octopus merge" files:
+|\
+| o  3 "add baz" files: baz
+| |
+| o  2 "add quux" files: quux
+| |
+o |  1 "change foo" files: foo
+|/
+o  0 "add foo" files: foo
+
+354ae8da6e890359ef49ade27b68bbc361f3ca88 644   baz
+9277c9cc8dd4576fc01a17939b4351e5ada93466 644   foo
+88dfeab657e8cf2cef3dec67b914f49791ae76b1 644   quux
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-hg-sink	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,57 @@
+#!/bin/sh
+
+cat >> $HGRCPATH <<EOF
+[extensions]
+convert=
+[convert]
+hg.saverev=False
+EOF
+
+hg init orig
+cd orig
+echo foo > foo
+echo bar > bar
+hg ci -qAm 'add foo and bar' -d '0 0'
+
+hg rm foo
+hg ci -m 'remove foo' -d '0 0'
+
+mkdir foo
+echo file > foo/file
+hg ci -qAm 'add foo/file' -d '0 0'
+
+hg tag -d '0 0' some-tag
+
+hg log
+cd ..
+
+hg convert orig new 2>&1 | grep -v 'subversion python bindings could not be loaded'
+cd new
+hg out ../orig
+
+echo '% dirstate should be empty:'
+hg debugstate
+hg parents -q
+
+hg up -C
+hg copy bar baz
+echo '% put something in the dirstate:'
+hg debugstate > debugstate
+grep baz debugstate
+
+echo '% add a new revision in the original repo'
+cd ../orig
+echo baz > baz
+hg ci -qAm 'add baz'
+
+cd ..
+hg convert orig new 2>&1 | grep -v 'subversion python bindings could not be loaded'
+cd new
+hg out ../orig
+echo '% dirstate should be the same (no output below):'
+hg debugstate > new-debugstate
+diff debugstate new-debugstate
+
+echo '% no copies'
+hg up -C
+hg debugrename baz
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-hg-sink.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,50 @@
+changeset:   3:593cbf6fb2b4
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     Added tag some-tag for changeset ad681a868e44
+
+changeset:   2:ad681a868e44
+tag:         some-tag
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     add foo/file
+
+changeset:   1:cbba8ecc03b7
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     remove foo
+
+changeset:   0:327daa9251fa
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     add foo and bar
+
+initializing destination new repository
+scanning source...
+sorting...
+converting...
+3 add foo and bar
+2 remove foo
+1 add foo/file
+0 Added tag some-tag for changeset ad681a868e44
+comparing with ../orig
+searching for changes
+no changes found
+% dirstate should be empty:
+3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% put something in the dirstate:
+a   0         -1 unset               baz
+copy: bar -> baz
+% add a new revision in the original repo
+scanning source...
+sorting...
+converting...
+0 add baz
+comparing with ../orig
+searching for changes
+no changes found
+% dirstate should be the same (no output below):
+% no copies
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+baz not renamed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-hg-source	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,40 @@
+#!/bin/sh
+
+cat >> $HGRCPATH <<EOF
+[extensions]
+convert=
+[convert]
+hg.saverev=False
+EOF
+
+hg init orig
+cd orig
+
+echo foo > foo
+echo bar > bar
+hg ci -qAm 'add foo bar' -d '0 0'
+
+echo >> foo
+hg ci -m 'change foo' -d '1 0'
+
+hg up -qC 0
+hg copy --after --force foo bar
+hg copy foo baz
+hg ci -m 'make bar and baz copies of foo' -d '2 0'
+
+hg merge
+hg ci -m 'merge local copy' -d '3 0'
+
+hg up -C 1
+hg merge 2
+hg ci -m 'merge remote copy' -d '4 0'
+
+chmod +x baz
+hg ci -m 'mark baz executable' -d '5 0'
+
+cd ..
+hg convert --datesort orig new 2>&1 | grep -v 'subversion python bindings could not be loaded'
+cd new
+hg out ../orig
+
+true
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-hg-source.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,20 @@
+merging baz and foo
+1 files updated, 1 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+merging foo and baz
+1 files updated, 1 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+initializing destination new repository
+scanning source...
+sorting...
+converting...
+5 add foo bar
+4 change foo
+3 make bar and baz copies of foo
+2 merge local copy
+1 merge remote copy
+0 mark baz executable
+comparing with ../orig
+searching for changes
+no changes found
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-hg-svn	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,73 @@
+#!/bin/sh
+
+"$TESTDIR/hghave" svn svn-bindings || exit 80
+
+fix_path()
+{
+    tr '\\' /
+}
+
+echo "[extensions]" >> $HGRCPATH
+echo "convert = " >> $HGRCPATH
+
+svnpath=`pwd | fix_path`/svn-repo
+svnadmin create $svnpath
+
+cat > $svnpath/hooks/pre-revprop-change <<'EOF'
+#!/bin/sh
+
+REPOS="$1"
+REV="$2"
+USER="$3"
+PROPNAME="$4"
+ACTION="$5"
+
+if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
+if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
+if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
+
+echo "Changing prohibited revision property" >&2
+exit 1
+EOF
+chmod +x $svnpath/hooks/pre-revprop-change
+
+# SVN wants all paths to start with a slash. Unfortunately,
+# Windows ones don't. Handle that.
+svnurl=$svnpath
+expr $svnurl : "\/" > /dev/null
+if [ $? -ne 0 ]; then
+    svnurl='/'$svnurl
+fi
+svnurl=file://$svnurl
+svn co $svnurl $svnpath-wc
+
+cd $svnpath-wc
+echo a > a
+svn add a
+svn ci -m'added a' a
+
+cd ..
+
+echo % initial roundtrip
+hg convert -s svn -d hg $svnpath-wc $svnpath-hg | grep -v initializing
+hg convert -s hg -d svn $svnpath-hg $svnpath-wc
+
+echo % second roundtrip should do nothing
+hg convert -s svn -d hg $svnpath-wc $svnpath-hg
+hg convert -s hg -d svn $svnpath-hg $svnpath-wc
+
+echo % new hg rev
+
+hg clone $svnpath-hg $svnpath-work
+echo b > $svnpath-work/b
+hg --cwd $svnpath-work add b
+hg --cwd $svnpath-work ci -mb
+
+echo % echo hg to svn
+hg --cwd $svnpath-hg pull -q $svnpath-work
+hg convert -s hg -d svn $svnpath-hg $svnpath-wc
+
+echo % svn back to hg should do nothing
+hg convert -s svn -d hg $svnpath-wc $svnpath-hg
+echo % hg back to svn should do nothing
+hg convert -s hg -d svn $svnpath-hg $svnpath-wc
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-hg-svn.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,35 @@
+Checked out revision 0.
+A         a
+Adding         a
+Transmitting file data .
+Committed revision 1.
+% initial roundtrip
+scanning source...
+sorting...
+converting...
+0 added a
+scanning source...
+sorting...
+converting...
+% second roundtrip should do nothing
+scanning source...
+sorting...
+converting...
+scanning source...
+sorting...
+converting...
+% new hg rev
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% echo hg to svn
+scanning source...
+sorting...
+converting...
+0 b
+% svn back to hg should do nothing
+scanning source...
+sorting...
+converting...
+% hg back to svn should do nothing
+scanning source...
+sorting...
+converting...
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-svn-branches	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,89 @@
+#!/bin/sh
+
+"$TESTDIR/hghave" svn svn-bindings || exit 80
+
+fix_path()
+{
+    tr '\\' /
+}
+
+echo "[extensions]" >> $HGRCPATH
+echo "convert = " >> $HGRCPATH
+echo "hgext.graphlog =" >> $HGRCPATH
+
+svnadmin create svn-repo
+
+svnpath=`pwd | fix_path`
+# SVN wants all paths to start with a slash. Unfortunately,
+# Windows ones don't. Handle that.
+expr $svnpath : "\/" > /dev/null
+if [ $? -ne 0 ]; then
+    svnpath='/'$svnpath
+fi
+
+echo % initial svn import
+mkdir projA
+cd projA
+mkdir trunk
+mkdir branches
+mkdir tags
+cd ..
+
+svnurl=file://$svnpath/svn-repo/projA
+svn import -m "init projA" projA $svnurl | fix_path
+
+echo % update svn repository
+svn co $svnurl A | fix_path
+cd A
+echo hello > trunk/letter.txt
+echo hey > trunk/letter2.txt
+echo ho > trunk/letter3.txt
+svn add trunk/letter.txt trunk/letter2.txt trunk/letter3.txt
+svn ci -m hello
+
+echo % branch to old letters
+svn copy trunk branches/old
+svn rm branches/old/letter3.txt
+svn ci -m "branch trunk, remove letter3"
+svn up
+
+echo % update trunk
+echo "what can I say ?" >> trunk/letter.txt
+svn ci -m "change letter"
+
+echo % update old branch
+echo "what's up ?" >> branches/old/letter2.txt
+svn ci -m "change letter2"
+
+echo % create a cross-branch revision
+svn move -m "move letter2" trunk/letter2.txt \
+    branches/old/letter3.txt
+echo "I am fine" >> branches/old/letter3.txt
+svn ci -m "move and update letter3.txt"
+
+echo % update old branch again
+echo "bye" >> branches/old/letter2.txt
+svn ci -m "change letter2 again"
+
+echo % update trunk again
+echo "how are you ?" >> trunk/letter.txt
+svn ci -m "last change to letter"
+cd ..
+
+echo % convert trunk and branches
+hg convert --datesort $svnurl A-hg
+
+echo % branch again from a converted revision
+cd A
+svn copy -r 1 $svnurl/trunk branches/old2
+svn ci -m "branch trunk@1 into old2"
+cd ..
+
+echo % convert again
+hg convert --datesort $svnurl A-hg
+
+cd A-hg
+hg glog --template '#rev# #desc|firstline# files: #files#\n'
+hg branches | sed 's/:.*/:/'
+hg tags -q
+cd ..
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-svn-branches.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,101 @@
+% initial svn import
+Adding         projA/trunk
+Adding         projA/branches
+Adding         projA/tags
+
+Committed revision 1.
+% update svn repository
+A    A/trunk
+A    A/branches
+A    A/tags
+Checked out revision 1.
+A         trunk/letter.txt
+A         trunk/letter2.txt
+A         trunk/letter3.txt
+Adding         trunk/letter.txt
+Adding         trunk/letter2.txt
+Adding         trunk/letter3.txt
+Transmitting file data ...
+Committed revision 2.
+% branch to old letters
+A         branches/old
+D         branches/old/letter3.txt
+Adding         branches/old
+Adding         branches/old/letter.txt
+Adding         branches/old/letter2.txt
+Deleting       branches/old/letter3.txt
+
+Committed revision 3.
+At revision 3.
+% update trunk
+Sending        trunk/letter.txt
+Transmitting file data .
+Committed revision 4.
+% update old branch
+Sending        branches/old/letter2.txt
+Transmitting file data .
+Committed revision 5.
+% create a cross-branch revision
+A         branches/old/letter3.txt
+D         trunk/letter2.txt
+Adding         branches/old/letter3.txt
+Deleting       trunk/letter2.txt
+Transmitting file data .
+Committed revision 6.
+% update old branch again
+Sending        branches/old/letter2.txt
+Transmitting file data .
+Committed revision 7.
+% update trunk again
+Sending        trunk/letter.txt
+Transmitting file data .
+Committed revision 8.
+% convert trunk and branches
+initializing destination A-hg repository
+scanning source...
+sorting...
+converting...
+8 init projA
+7 hello
+6 branch trunk, remove letter3
+5 change letter
+4 change letter2
+3 move and update letter3.txt
+2 move and update letter3.txt
+1 change letter2 again
+0 last change to letter
+% branch again from a converted revision
+Checked out revision 1.
+A         branches/old2
+Adding         branches/old2
+
+Committed revision 9.
+% convert again
+scanning source...
+sorting...
+converting...
+0 branch trunk@1 into old2
+o  9 branch trunk@1 into old2 files:
+|
+| o  8 last change to letter files: letter.txt
+| |
+| | o  7 change letter2 again files: letter2.txt
+| | |
+| o |  6 move and update letter3.txt files: letter2.txt
+| | |
+| | o  5 move and update letter3.txt files: letter3.txt
+| | |
+| | o  4 change letter2 files: letter2.txt
+| | |
+| o |  3 change letter files: letter.txt
+| | |
++---o  2 branch trunk, remove letter3 files: letter.txt letter2.txt
+| |
+| o  1 hello files: letter.txt letter2.txt letter3.txt
+|/
+o  0 init projA files:
+
+old2                           9:
+default                        8:
+old                            7:
+tip
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-svn-move	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,65 @@
+#!/bin/sh
+
+"$TESTDIR/hghave" svn svn-bindings || exit 80
+
+fix_path()
+{
+    tr '\\' /
+}
+
+echo "[extensions]" >> $HGRCPATH
+echo "convert = " >> $HGRCPATH
+echo "hgext.graphlog =" >> $HGRCPATH
+
+svnadmin create svn-repo
+
+svnpath=`pwd | fix_path`
+# SVN wants all paths to start with a slash. Unfortunately,
+# Windows ones don't. Handle that.
+expr $svnpath : "\/" > /dev/null
+if [ $? -ne 0 ]; then
+    svnpath='/'$svnpath
+fi
+
+echo % initial svn import
+mkdir projA
+cd projA
+mkdir trunk
+echo a > trunk/a
+mkdir trunk/d1
+echo b > trunk/d1/b
+echo c > trunk/d1/c
+cd ..
+
+svnurl=file://$svnpath/svn-repo/projA
+svn import -m "init projA" projA $svnurl | fix_path
+
+# Build a module renaming chain which used to confuse the converter.
+echo % update svn repository
+svn co $svnurl A | fix_path
+cd A
+echo a >> trunk/a
+echo c >> trunk/d1/c
+svn ci -m commitbeforemove
+svn mv $svnurl/trunk $svnurl/subproject -m movedtrunk
+svn up
+mkdir subproject/trunk
+svn add subproject/trunk
+svn ci -m createtrunk
+mkdir subproject/branches
+svn add subproject/branches
+svn ci -m createbranches
+svn mv $svnurl/subproject/d1 $svnurl/subproject/trunk/d1 -m moved1
+svn up
+echo b >> subproject/trunk/d1/b
+svn ci -m changeb
+svn mv $svnurl/subproject/trunk/d1 $svnurl/subproject/branches/d1 -m moved1again
+cd ..
+
+echo % convert trunk and branches
+hg convert --datesort $svnurl/subproject A-hg
+
+cd A-hg
+hg glog --template '#rev# #desc|firstline# files: #files#\n'
+hg branches | sed 's/:.*/:/'
+cd ..
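
[Editor's note] The converter is pointed at the post-move location ($svnurl/subproject), so the renaming chain built above is seen from the module's final home. Re-running the same command should be a cheap incremental update, since only unseen revisions get converted (incremental conversion is exercised explicitly in tests/test-convert-svn-source below); a minimal sketch:

    hg convert --datesort "$svnurl/subproject" A-hg   # re-run: scans the source, converts nothing new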
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-svn-move.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,76 @@
+% initial svn import
+Adding         projA/trunk
+Adding         projA/trunk/a
+Adding         projA/trunk/d1
+Adding         projA/trunk/d1/b
+Adding         projA/trunk/d1/c
+
+Committed revision 1.
+% update svn repository
+A    A/trunk
+A    A/trunk/a
+A    A/trunk/d1
+A    A/trunk/d1/b
+A    A/trunk/d1/c
+Checked out revision 1.
+Sending        trunk/a
+Sending        trunk/d1/c
+Transmitting file data ..
+Committed revision 2.
+
+Committed revision 3.
+D    trunk
+A    subproject
+A    subproject/a
+A    subproject/d1
+A    subproject/d1/b
+A    subproject/d1/c
+Updated to revision 3.
+A         subproject/trunk
+Adding         subproject/trunk
+
+Committed revision 4.
+A         subproject/branches
+Adding         subproject/branches
+
+Committed revision 5.
+
+Committed revision 6.
+A    subproject/trunk/d1
+A    subproject/trunk/d1/b
+A    subproject/trunk/d1/c
+D    subproject/d1
+Updated to revision 6.
+Sending        subproject/trunk/d1/b
+Transmitting file data .
+Committed revision 7.
+
+Committed revision 8.
+% convert trunk and branches
+initializing destination A-hg repository
+scanning source...
+sorting...
+converting...
+6 createtrunk
+5 moved1
+4 moved1
+3 changeb
+2 changeb
+1 moved1again
+0 moved1again
+o  6 moved1again files: d1/b d1/c
+|
+| o  5 moved1again files:
+| |
+o |  4 changeb files: d1/b
+| |
+| o  3 changeb files: b
+| |
+o |  2 moved1 files: d1/b d1/c
+| |
+| o  1 moved1 files: b c
+|
+o  0 createtrunk files:
+
+default                        6:
+d1                             5:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-svn-sink	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,135 @@
+#!/bin/sh
+
+"$TESTDIR/hghave" svn svn-bindings || exit 80
+
+fixpath()
+{
+    tr '\\' /
+}
+
+svnupanddisplay()
+{
+    (
+       cd $1;
+       svn up;
+       svn st -v | fixpath
+       limit=''
+       if [ $2 -gt 0 ]; then
+           limit="--limit=$2"
+       fi
+       svn log --xml -v $limit | fixpath | sed 's,<date>.*,<date/>,'
+    )
+}
+
+echo "[extensions]" >> $HGRCPATH
+echo "convert = " >> $HGRCPATH
+
+hg init a
+
+echo a > a/a
+mkdir -p a/d1/d2
+echo b > a/d1/d2/b
+echo % add
+hg --cwd a ci -d '0 0' -A -m 'add a file'
+
+echo a >> a/a
+echo % modify
+hg --cwd a ci -d '1 0' -m 'modify a file'
+hg --cwd a tip -q
+
+hg convert -d svn a
+svnupanddisplay a-hg-wc 2
+ls a a-hg-wc
+cmp a/a a-hg-wc/a && echo same || echo different
+
+hg --cwd a mv a b
+echo % rename
+hg --cwd a ci -d '2 0' -m 'rename a file'
+hg --cwd a tip -q
+
+hg convert -d svn a
+svnupanddisplay a-hg-wc 1
+ls a a-hg-wc
+
+hg --cwd a cp b c
+echo % copy
+hg --cwd a ci -d '3 0' -m 'copy a file'
+hg --cwd a tip -q
+
+hg convert -d svn a
+svnupanddisplay a-hg-wc 1
+ls a a-hg-wc
+
+hg --cwd a rm b
+echo % remove
+hg --cwd a ci -d '4 0' -m 'remove a file'
+hg --cwd a tip -q
+
+hg convert -d svn a
+svnupanddisplay a-hg-wc 1
+ls a a-hg-wc
+
+chmod +x a/c
+echo % executable
+hg --cwd a ci -d '5 0' -m 'make a file executable'
+hg --cwd a tip -q
+
+hg convert -d svn a
+svnupanddisplay a-hg-wc 1
+test -x a-hg-wc/c && echo executable || echo not executable
+
+echo % executable in new directory
+
+rm -rf a a-hg a-hg-wc
+hg init a
+
+mkdir a/d1
+echo a > a/d1/a
+chmod +x a/d1/a
+hg --cwd a ci -d '0 0' -A -m 'add executable file in new directory'
+
+hg convert -d svn a
+svnupanddisplay a-hg-wc 1
+test -x a-hg-wc/d1/a && echo executable || echo not executable
+
+echo % copy to new directory
+
+mkdir a/d2
+hg --cwd a cp d1/a d2/a
+hg --cwd a ci -d '1 0' -A -m 'copy file to new directory'
+
+hg convert -d svn a
+svnupanddisplay a-hg-wc 1
+
+echo % branchy history
+
+hg init b
+echo base > b/b
+hg --cwd b ci -d '0 0' -Ambase
+
+echo left-1 >> b/b
+echo left-1 > b/left-1
+hg --cwd b ci -d '1 0' -Amleft-1
+
+echo left-2 >> b/b
+echo left-2 > b/left-2
+hg --cwd b ci -d '2 0' -Amleft-2
+
+hg --cwd b up 0
+
+echo right-1 >> b/b
+echo right-1 > b/right-1
+hg --cwd b ci -d '3 0' -Amright-1
+
+echo right-2 >> b/b
+echo right-2 > b/right-2
+hg --cwd b ci -d '4 0' -Amright-2
+
+hg --cwd b up -C 2
+hg --cwd b merge
+hg --cwd b revert -r 2 b
+hg --cwd b ci -d '5 0' -m 'merge'
+
+hg convert -d svn b
+echo % expect 4 changes
+svnupanddisplay b-hg-wc 0
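
[Editor's note] The cmp content check used after the first conversion in this script could equally be applied to the later conversions; a minimal sketch for the copied file c from the middle of the script:

    cmp a/c a-hg-wc/c && echo same || echo different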
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-svn-sink.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,338 @@
+% add
+adding a
+adding d1/d2/b
+% modify
+1:e0e2b8a9156b
+assuming destination a-hg
+initializing svn repo 'a-hg'
+initializing svn wc 'a-hg-wc'
+scanning source...
+sorting...
+converting...
+1 add a file
+0 modify a file
+At revision 2.
+                2        2 test         .
+                2        2 test         a
+                2        1 test         d1
+                2        1 test         d1/d2
+                2        1 test         d1/d2/b
+<?xml version="1.0"?>
+<log>
+<logentry
+   revision="2">
+<author>test</author>
+<date/>
+<paths>
+<path
+   action="M">/a</path>
+</paths>
+<msg>modify a file</msg>
+</logentry>
+<logentry
+   revision="1">
+<author>test</author>
+<date/>
+<paths>
+<path
+   action="A">/a</path>
+<path
+   action="A">/d1</path>
+<path
+   action="A">/d1/d2</path>
+<path
+   action="A">/d1/d2/b</path>
+</paths>
+<msg>add a file</msg>
+</logentry>
+</log>
+a:
+a
+d1
+
+a-hg-wc:
+a
+d1
+same
+% rename
+2:7009fc4efb34
+assuming destination a-hg
+initializing svn wc 'a-hg-wc'
+scanning source...
+sorting...
+converting...
+0 rename a file
+At revision 3.
+                3        3 test         .
+                3        3 test         b
+                3        1 test         d1
+                3        1 test         d1/d2
+                3        1 test         d1/d2/b
+<?xml version="1.0"?>
+<log>
+<logentry
+   revision="3">
+<author>test</author>
+<date/>
+<paths>
+<path
+   action="D">/a</path>
+<path
+   copyfrom-path="/a"
+   copyfrom-rev="2"
+   action="A">/b</path>
+</paths>
+<msg>rename a file</msg>
+</logentry>
+</log>
+a:
+b
+d1
+
+a-hg-wc:
+b
+d1
+% copy
+3:56c519973ce6
+assuming destination a-hg
+initializing svn wc 'a-hg-wc'
+scanning source...
+sorting...
+converting...
+0 copy a file
+At revision 4.
+                4        4 test         .
+                4        3 test         b
+                4        4 test         c
+                4        1 test         d1
+                4        1 test         d1/d2
+                4        1 test         d1/d2/b
+<?xml version="1.0"?>
+<log>
+<logentry
+   revision="4">
+<author>test</author>
+<date/>
+<paths>
+<path
+   copyfrom-path="/b"
+   copyfrom-rev="3"
+   action="A">/c</path>
+</paths>
+<msg>copy a file</msg>
+</logentry>
+</log>
+a:
+b
+c
+d1
+
+a-hg-wc:
+b
+c
+d1
+% remove
+4:ed4dc9a6f585
+assuming destination a-hg
+initializing svn wc 'a-hg-wc'
+scanning source...
+sorting...
+converting...
+0 remove a file
+At revision 5.
+                5        5 test         .
+                5        4 test         c
+                5        1 test         d1
+                5        1 test         d1/d2
+                5        1 test         d1/d2/b
+<?xml version="1.0"?>
+<log>
+<logentry
+   revision="5">
+<author>test</author>
+<date/>
+<paths>
+<path
+   action="D">/b</path>
+</paths>
+<msg>remove a file</msg>
+</logentry>
+</log>
+a:
+c
+d1
+
+a-hg-wc:
+c
+d1
+% executable
+5:f205b3636d77
+assuming destination a-hg
+initializing svn wc 'a-hg-wc'
+scanning source...
+sorting...
+converting...
+0 make a file executable
+At revision 6.
+                6        6 test         .
+                6        6 test         c
+                6        1 test         d1
+                6        1 test         d1/d2
+                6        1 test         d1/d2/b
+<?xml version="1.0"?>
+<log>
+<logentry
+   revision="6">
+<author>test</author>
+<date/>
+<paths>
+<path
+   action="M">/c</path>
+</paths>
+<msg>make a file executable</msg>
+</logentry>
+</log>
+executable
+% executable in new directory
+adding d1/a
+assuming destination a-hg
+initializing svn repo 'a-hg'
+initializing svn wc 'a-hg-wc'
+scanning source...
+sorting...
+converting...
+0 add executable file in new directory
+At revision 1.
+                1        1 test         .
+                1        1 test         d1
+                1        1 test         d1/a
+<?xml version="1.0"?>
+<log>
+<logentry
+   revision="1">
+<author>test</author>
+<date/>
+<paths>
+<path
+   action="A">/d1</path>
+<path
+   action="A">/d1/a</path>
+</paths>
+<msg>add executable file in new directory</msg>
+</logentry>
+</log>
+executable
+% copy to new directory
+assuming destination a-hg
+initializing svn wc 'a-hg-wc'
+scanning source...
+sorting...
+converting...
+0 copy file to new directory
+At revision 2.
+                2        2 test         .
+                2        1 test         d1
+                2        1 test         d1/a
+                2        2 test         d2
+                2        2 test         d2/a
+<?xml version="1.0"?>
+<log>
+<logentry
+   revision="2">
+<author>test</author>
+<date/>
+<paths>
+<path
+   action="A">/d2</path>
+<path
+   copyfrom-path="/d1/a"
+   copyfrom-rev="1"
+   action="A">/d2/a</path>
+</paths>
+<msg>copy file to new directory</msg>
+</logentry>
+</log>
+% branchy history
+adding b
+adding left-1
+adding left-2
+1 files updated, 0 files merged, 2 files removed, 0 files unresolved
+adding right-1
+adding right-2
+3 files updated, 0 files merged, 2 files removed, 0 files unresolved
+merging b
+warning: conflicts during merge.
+merging b failed!
+2 files updated, 0 files merged, 0 files removed, 1 files unresolved
+There are unresolved merges, you can redo the full merge using:
+  hg update -C 2
+  hg merge 4
+assuming destination b-hg
+initializing svn repo 'b-hg'
+initializing svn wc 'b-hg-wc'
+scanning source...
+sorting...
+converting...
+5 base
+4 left-1
+3 left-2
+2 right-1
+1 right-2
+0 merge
+% expect 4 changes
+At revision 4.
+                4        4 test         .
+                4        3 test         b
+                4        2 test         left-1
+                4        3 test         left-2
+                4        4 test         right-1
+                4        4 test         right-2
+<?xml version="1.0"?>
+<log>
+<logentry
+   revision="4">
+<author>test</author>
+<date/>
+<paths>
+<path
+   action="A">/right-1</path>
+<path
+   action="A">/right-2</path>
+</paths>
+<msg>merge</msg>
+</logentry>
+<logentry
+   revision="3">
+<author>test</author>
+<date/>
+<paths>
+<path
+   action="M">/b</path>
+<path
+   action="A">/left-2</path>
+</paths>
+<msg>left-2</msg>
+</logentry>
+<logentry
+   revision="2">
+<author>test</author>
+<date/>
+<paths>
+<path
+   action="M">/b</path>
+<path
+   action="A">/left-1</path>
+</paths>
+<msg>left-1</msg>
+</logentry>
+<logentry
+   revision="1">
+<author>test</author>
+<date/>
+<paths>
+<path
+   action="A">/b</path>
+</paths>
+<msg>base</msg>
+</logentry>
+</log>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-svn-source	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,180 @@
+#!/bin/sh
+
+"$TESTDIR/hghave" svn svn-bindings || exit 80
+
+fix_path()
+{
+    tr '\\' /
+}
+
+echo "[extensions]" >> $HGRCPATH
+echo "convert = " >> $HGRCPATH
+
+svnadmin create svn-repo
+
+echo % initial svn import
+mkdir t
+cd t
+echo a > a
+cd ..
+
+svnpath=`pwd | fix_path`
+# SVN wants all paths to start with a slash. Unfortunately,
+# Windows ones don't. Handle that.
+expr $svnpath : "\/" > /dev/null
+if [ $? -ne 0 ]; then
+    svnpath='/'$svnpath
+fi
+
+svnurl=file://$svnpath/svn-repo/trunk/test
+svn import -m init t $svnurl | fix_path
+
+echo % update svn repository
+svn co $svnurl t2 | fix_path
+cd t2
+echo b >> a
+echo b > b
+svn add b
+svn ci -m changea
+cd ..
+
+echo % convert to hg once
+hg convert $svnurl
+
+echo % update svn repository again
+cd t2
+echo c >> a
+echo c >> b
+svn ci -m changeb
+cd ..
+
+echo % test incremental conversion
+hg convert -v $svnurl | sed 's/source:.*/source:/'
+
+echo % test filemap
+echo 'include b' > filemap
+hg convert --filemap filemap $svnurl fmap
+echo '[extensions]' >> $HGRCPATH
+echo 'hgext.graphlog =' >> $HGRCPATH
+hg glog -R fmap --template '#rev# #desc|firstline# files: #files#\n'
+
+echo % test stop revision
+hg convert --rev 1 $svnurl stoprev
+# Check convert_revision extra-records.
+# This is also the only place testing more than one extra field
+# in a revision.
+hg --cwd stoprev tip --debug | grep extra | sed 's/=.*/=/'
+
+########################################
+
+echo "# now tests that it works with trunk/branches/tags layout"
+echo
+echo % initial svn import
+mkdir projA
+cd projA
+mkdir trunk
+mkdir branches
+mkdir tags
+cd ..
+
+svnurl=file://$svnpath/svn-repo/projA
+svn import -m "init projA" projA $svnurl | fix_path
+
+
+echo % update svn repository
+svn co $svnurl/trunk A | fix_path
+cd A
+echo hello > letter.txt
+svn add letter.txt
+svn ci -m hello
+
+echo world >> letter.txt
+svn ci -m world
+
+svn copy -m "tag v0.1" $svnurl/trunk $svnurl/tags/v0.1
+
+echo 'nice day today!' >> letter.txt
+svn ci -m "nice day"
+cd ..
+
+echo % convert to hg once
+hg convert $svnurl A-hg
+
+echo % update svn repository again
+cd A
+echo "see second letter" >> letter.txt
+# Put it in a subdirectory to test duplicate file records
+# from svn source (issue 714)
+mkdir todo
+echo "nice to meet you" > todo/letter2.txt
+svn add todo
+svn ci -m "second letter"
+
+svn copy -m "tag v0.2" $svnurl/trunk $svnurl/tags/v0.2
+
+echo "blah-blah-blah" >> todo/letter2.txt
+svn ci -m "work in progress"
+cd ..
+
+echo % test incremental conversion
+hg convert $svnurl A-hg
+
+cd A-hg
+hg glog --template '#rev# #desc|firstline# files: #files#\n'
+hg tags -q
+cd ..
+
+########################################
+
+echo "# now tests that it works with trunk/tags layout, but no branches yet"
+echo
+echo % initial svn import
+mkdir projB
+cd projB
+mkdir trunk
+mkdir tags
+cd ..
+
+svnurl=file://$svnpath/svn-repo/projB
+svn import -m "init projB" projB $svnurl | fix_path
+
+
+echo % update svn repository
+svn co $svnurl/trunk B | fix_path
+cd B
+echo hello > letter.txt
+svn add letter.txt
+svn ci -m hello
+
+echo world >> letter.txt
+svn ci -m world
+
+svn copy -m "tag v0.1" $svnurl/trunk $svnurl/tags/v0.1
+
+echo 'nice day today!' >> letter.txt
+svn ci -m "nice day"
+cd ..
+
+echo % convert to hg once
+hg convert $svnurl B-hg
+
+echo % update svn repository again
+cd B
+echo "see second letter" >> letter.txt
+echo "nice to meet you" > letter2.txt
+svn add letter2.txt
+svn ci -m "second letter"
+
+svn copy -m "tag v0.2" $svnurl/trunk $svnurl/tags/v0.2
+
+echo "blah-blah-blah" >> letter2.txt
+svn ci -m "work in progress"
+cd ..
+
+echo % test incremental conversion
+hg convert $svnurl B-hg
+
+cd B-hg
+hg glog --template '#rev# #desc|firstline# files: #files#\n'
+hg tags -q
+cd ..
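
[Editor's note] Incremental conversion here works because convert records every source/destination pair in the destination's shamap, described in the help output captured in tests/test-convert.out below (<dest>/.hg/shamap, one "<source ID> <destination ID>" line per converted revision). A minimal sketch of inspecting it after the runs above:

    wc -l B-hg/.hg/shamap     # one line per converted changeset
    head -1 B-hg/.hg/shamap   # <source ID> <destination ID>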
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-svn-source.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,195 @@
+% initial svn import
+Adding         t/a
+
+Committed revision 1.
+% update svn repository
+A    t2/a
+Checked out revision 1.
+A         b
+Sending        a
+Adding         b
+Transmitting file data ..
+Committed revision 2.
+% convert to hg once
+assuming destination test-hg
+initializing destination test-hg repository
+scanning source...
+sorting...
+converting...
+1 init
+0 changea
+% update svn repository again
+Sending        a
+Sending        b
+Transmitting file data ..
+Committed revision 3.
+% test incremental conversion
+assuming destination test-hg
+scanning source...
+fetching revision log for "/trunk/test" from 3 to 2
+sorting...
+converting...
+0 changeb
+source:
+a
+b
+no tags found at revision 3
+% test filemap
+initializing destination fmap repository
+scanning source...
+sorting...
+converting...
+2 init
+1 changea
+0 changeb
+o  1 changeb files: b
+|
+o  0 changea files: b
+
+% test stop revision
+initializing destination stoprev repository
+scanning source...
+sorting...
+converting...
+0 init
+extra:       branch=
+extra:       convert_revision=
+# now tests that it works with trunk/branches/tags layout
+
+% initial svn import
+Adding         projA/trunk
+Adding         projA/branches
+Adding         projA/tags
+
+Committed revision 4.
+% update svn repository
+Checked out revision 4.
+A         letter.txt
+Adding         letter.txt
+Transmitting file data .
+Committed revision 5.
+Sending        letter.txt
+Transmitting file data .
+Committed revision 6.
+
+Committed revision 7.
+Sending        letter.txt
+Transmitting file data .
+Committed revision 8.
+% convert to hg once
+initializing destination A-hg repository
+scanning source...
+sorting...
+converting...
+3 init projA
+2 hello
+1 world
+0 nice day
+updating tags
+% update svn repository again
+A         todo
+A         todo/letter2.txt
+Sending        letter.txt
+Adding         todo
+Adding         todo/letter2.txt
+Transmitting file data ..
+Committed revision 9.
+
+Committed revision 10.
+Sending        todo/letter2.txt
+Transmitting file data .
+Committed revision 11.
+% test incremental conversion
+scanning source...
+sorting...
+converting...
+1 second letter
+0 work in progress
+updating tags
+o  7 update tags files: .hgtags
+|
+o  6 work in progress files: todo/letter2.txt
+|
+o  5 second letter files: letter.txt todo/letter2.txt
+|
+o  4 update tags files: .hgtags
+|
+o  3 nice day files: letter.txt
+|
+o  2 world files: letter.txt
+|
+o  1 hello files: letter.txt
+|
+o  0 init projA files:
+
+tip
+v0.2
+v0.1
+# now tests that it works with trunk/tags layout, but no branches yet
+
+% initial svn import
+Adding         projB/trunk
+Adding         projB/tags
+
+Committed revision 12.
+% update svn repository
+Checked out revision 12.
+A         letter.txt
+Adding         letter.txt
+Transmitting file data .
+Committed revision 13.
+Sending        letter.txt
+Transmitting file data .
+Committed revision 14.
+
+Committed revision 15.
+Sending        letter.txt
+Transmitting file data .
+Committed revision 16.
+% convert to hg once
+initializing destination B-hg repository
+scanning source...
+sorting...
+converting...
+3 init projB
+2 hello
+1 world
+0 nice day
+updating tags
+% update svn repository again
+A         letter2.txt
+Sending        letter.txt
+Adding         letter2.txt
+Transmitting file data ..
+Committed revision 17.
+
+Committed revision 18.
+Sending        letter2.txt
+Transmitting file data .
+Committed revision 19.
+% test incremental conversion
+scanning source...
+sorting...
+converting...
+1 second letter
+0 work in progress
+updating tags
+o  7 update tags files: .hgtags
+|
+o  6 work in progress files: letter2.txt
+|
+o  5 second letter files: letter.txt letter2.txt
+|
+o  4 update tags files: .hgtags
+|
+o  3 nice day files: letter.txt
+|
+o  2 world files: letter.txt
+|
+o  1 hello files: letter.txt
+|
+o  0 init projB files:
+
+tip
+v0.2
+v0.1
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,119 @@
+hg convert [OPTION]... SOURCE [DEST [MAPFILE]]
+
+Convert a foreign SCM repository to a Mercurial one.
+
+    Accepted source formats:
+    - Mercurial
+    - CVS
+    - Darcs
+    - git
+    - Subversion
+
+    Accepted destination formats:
+    - Mercurial
+    - Subversion (history on branches is not preserved)
+
+    If no revision is given, all revisions will be converted. Otherwise,
+    convert will only import up to the named revision (given in a format
+    understood by the source).
+
+    If no destination directory name is specified, it defaults to the
+    basename of the source with '-hg' appended.  If the destination
+    repository doesn't exist, it will be created.
+
+    If <MAPFILE> isn't given, it will be put in a default location
+    (<dest>/.hg/shamap by default).  The <MAPFILE> is a simple text
+    file that maps each source commit ID to the destination ID for
+    that revision, like so:
+    <source ID> <destination ID>
+
+    If the file doesn't exist, it's automatically created.  It's updated
+    on each commit copied, so convert-repo can be interrupted and can
+    be run repeatedly to copy new commits.
+
+    The [username mapping] file is a simple text file that maps each source
+    commit author to a destination commit author. It is handy for source SCMs
+    that use unix logins to identify authors (eg: CVS). One line per author
+    mapping and the line format is:
+    srcauthor=whatever string you want
+
+    The filemap is a file that allows filtering and remapping of files
+    and directories.  Comment lines start with '#'.  Each line can
+    contain one of the following directives:
+
+      include path/to/file
+
+      exclude path/to/file
+
+      rename from/file to/file
+
+    The 'include' directive causes a file, or all files under a
+    directory, to be included in the destination repository, and the
+    exclusion of all other files and dirs not explicitly included.
+    The 'exclude' directive causes files or directories to be omitted.
+    The 'rename' directive renames a file or directory.  To rename from a
+    subdirectory into the root of the repository, use '.' as the path to
+    rename to.
+
+    Back end options:
+
+    --config convert.hg.clonebranches=False   (boolean)
+        hg target: XXX not documented
+    --config convert.hg.saverev=True          (boolean)
+        hg source: allow target to preserve source revision ID
+    --config convert.hg.tagsbranch=default    (branch name)
+        hg target: XXX not documented
+    --config convert.hg.usebranchnames=True   (boolean)
+        hg target: preserve branch names
+
+    --config convert.svn.branches=branches    (directory name)
+        svn source: specify the directory containing branches
+    --config convert.svn.tags=tags            (directory name)
+        svn source: specify the directory containing tags
+    --config convert.svn.trunk=trunk          (directory name)
+        svn source: specify the name of the trunk branch
+
+options:
+
+ -A --authors      username mapping filename
+ -d --dest-type    destination repository type
+    --filemap      remap file names using contents of file
+ -r --rev          import up to target revision REV
+ -s --source-type  source repository type
+    --datesort     try to sort changesets by date
+
+use "hg -v help convert" to show global options
+adding a
+assuming destination a-hg
+initializing destination a-hg repository
+scanning source...
+sorting...
+converting...
+4 a
+3 b
+2 c
+1 d
+0 e
+pulling from ../a
+searching for changes
+no changes found
+% should fail
+initializing destination bogusfile repository
+abort: cannot create new bundle repository
+% should fail
+abort: Permission denied: bogusdir
+% should succeed
+initializing destination bogusdir repository
+scanning source...
+sorting...
+converting...
+4 a
+3 b
+2 c
+1 d
+0 e
+% test pre and post conversion actions
+run hg source pre-conversion action
+run hg sink pre-conversion action
+run hg sink post-conversion action
+run hg source post-conversion action
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-copy-move-merge	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,30 @@
+#!/bin/sh
+
+mkdir t
+cd t
+hg init
+
+echo 1 > a
+hg ci -qAm "first" -d "1000000 0"
+
+hg cp a b
+hg mv a c
+echo 2 >> b
+echo 2 >> c
+
+hg ci -qAm "second" -d "1000000 0"
+
+hg co -C 0
+
+echo 0 > a
+echo 1 >> a
+
+hg ci -qAm "other" -d "1000000 0"
+
+hg merge --debug
+
+echo "-- b --"
+cat b
+
+echo "-- c --"
+cat c
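
[Editor's note] The copy relationships that the --debug merge output reports below ("all copies found") are recorded by the 'second' changeset, so they can also be queried per file; a minimal sketch using debugrename, which prints a file's recorded copy source:

    hg debugrename -r 1 b   # expected to report b as renamed/copied from a
    hg debugrename -r 1 c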
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-copy-move-merge.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,35 @@
+1 files updated, 0 files merged, 2 files removed, 0 files unresolved
+resolving manifests
+ overwrite None partial False
+ ancestor 583c7b748052 local fb3948d97f07+ remote 40da226db0f0
+  searching for copies back to rev 1
+  unmatched files in other:
+   b
+   c
+  all copies found (* = to merge, ! = divergent):
+   c -> a *
+   b -> a *
+  checking for directory renames
+ a: remote moved to c -> m
+ a: remote moved to b -> m
+copying a to b
+copying a to c
+picked tool 'internal:merge' for a (binary False symlink False)
+merging a and b
+my a@fb3948d97f07+ other b@40da226db0f0 ancestor a@583c7b748052
+ premerge successful
+removing a
+picked tool 'internal:merge' for a (binary False symlink False)
+merging a and c
+my a@fb3948d97f07+ other c@40da226db0f0 ancestor a@583c7b748052
+ premerge successful
+0 files updated, 2 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+-- b --
+0
+1
+2
+-- c --
+0
+1
+2
--- a/tests/test-debugcomplete.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-debugcomplete.out	Wed Feb 06 19:57:52 2008 -0800
@@ -4,6 +4,7 @@
 annotate
 archive
 backout
+bisect
 branch
 branches
 bundle
@@ -62,6 +63,7 @@
 debugconfig
 debugdata
 debugdate
+debugfsinfo
 debugindex
 debugindexdot
 debuginstall
@@ -127,6 +129,7 @@
 --noninteractive
 --pid-file
 --port
+--prefix
 --profile
 --quiet
 --repository
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-debugindexdot	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+# Just exercise debugindexdot
+# Create a short file history including a merge.
+hg init t
+cd t
+echo a > a
+hg ci -qAm t1 -d '0 0'
+echo a >> a
+hg ci -m t2 -d '1 0'
+hg up -qC 0
+echo b >> a
+hg ci -m t3 -d '2 0'
+HGMERGE=true hg merge -q
+hg ci -m merge -d '3 0'
+
+hg debugindexdot .hg/store/data/a.i
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-debugindexdot.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,7 @@
+digraph G {
+	-1 -> 0
+	0 -> 1
+	0 -> 2
+	2 -> 3
+	1 -> 3
+}
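
[Editor's note] The digraph above is the revlog DAG for file a; the same index can be dumped in tabular form with debugindex, which is listed next to debugindexdot in tests/test-debugcomplete.out. A minimal sketch, run from inside the test repository:

    hg debugindex .hg/store/data/a.i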
--- a/tests/test-diff-hashes	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-diff-hashes	Wed Feb 06 19:57:52 2008 -0800
@@ -2,6 +2,7 @@
 
 hg init a
 cd a
+hg diff not found
 echo bar > foo
 hg add foo
 hg ci -m 'add foo' -d '1000000 0'
--- a/tests/test-diff-hashes.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-diff-hashes.out	Wed Feb 06 19:57:52 2008 -0800
@@ -1,7 +1,9 @@
+found: No such file or directory
+not: No such file or directory
 quiet:
 --- a/foo	Mon Jan 12 13:46:40 1970 +0000
 +++ b/foo	Mon Jan 12 13:46:41 1970 +0000
-@@ -1,1 +1,1 @@ bar
+@@ -1,1 +1,1 @@
 -bar
 +foobar
 
@@ -9,7 +11,7 @@
 diff -r 74de3f1392e2 -r b8b5f023a6ad foo
 --- a/foo	Mon Jan 12 13:46:40 1970 +0000
 +++ b/foo	Mon Jan 12 13:46:41 1970 +0000
-@@ -1,1 +1,1 @@ bar
+@@ -1,1 +1,1 @@
 -bar
 +foobar
 
@@ -17,7 +19,7 @@
 diff -r 74de3f1392e2 -r b8b5f023a6ad foo
 --- a/foo	Mon Jan 12 13:46:40 1970 +0000
 +++ b/foo	Mon Jan 12 13:46:41 1970 +0000
-@@ -1,1 +1,1 @@ bar
+@@ -1,1 +1,1 @@
 -bar
 +foobar
 
@@ -25,7 +27,7 @@
 diff -r 74de3f1392e2d67856fb155963441f2610494e1a -r b8b5f023a6ad77fc378bd95cf3fa00cd1414d107 foo
 --- a/foo	Mon Jan 12 13:46:40 1970 +0000
 +++ b/foo	Mon Jan 12 13:46:41 1970 +0000
-@@ -1,1 +1,1 @@ bar
+@@ -1,1 +1,1 @@
 -bar
 +foobar
 
--- a/tests/test-diff-ignore-whitespace.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-diff-ignore-whitespace.out	Wed Feb 06 19:57:52 2008 -0800
@@ -4,7 +4,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,5 @@ hello world
+@@ -1,2 +1,5 @@
 +
  hello world
 +
@@ -14,7 +14,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,5 @@ hello world
+@@ -1,2 +1,5 @@
 +
  hello world
 +
@@ -28,7 +28,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,2 @@ hello world
+@@ -1,2 +1,2 @@
 -hello world
 +	 hello world
  goodbye world
@@ -36,7 +36,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,2 @@ hello world
+@@ -1,2 +1,2 @@
 -hello world
 +	 hello world
  goodbye world
@@ -44,7 +44,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,2 @@ hello world
+@@ -1,2 +1,2 @@
 -hello world
 +	 hello world
  goodbye world
@@ -52,7 +52,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,2 @@ hello world
+@@ -1,2 +1,2 @@
 -hello world
 +	 hello world
  goodbye world
@@ -61,7 +61,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,2 @@ hello world
+@@ -1,2 +1,2 @@
 -hello world
 +hello world	 
  goodbye world
@@ -69,7 +69,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,2 @@ hello world
+@@ -1,2 +1,2 @@
 -hello world
 +hello world	 
  goodbye world
@@ -81,7 +81,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,2 @@ hello world
+@@ -1,2 +1,2 @@
  hello world
 -goodbye world
 +good bye world
@@ -89,7 +89,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,2 @@ hello world
+@@ -1,2 +1,2 @@
  hello world
 -goodbye world
 +good bye world
@@ -97,7 +97,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,2 @@ hello world
+@@ -1,2 +1,2 @@
  hello world
 -goodbye world
 +good bye world
@@ -105,7 +105,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,2 @@ hello world
+@@ -1,2 +1,2 @@
  hello world
 -goodbye world
 +good bye world
@@ -114,7 +114,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,2 @@ hello world
+@@ -1,2 +1,2 @@
  hello world
 -goodbye world
 +goodbye		  	world
@@ -122,7 +122,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,2 @@ hello world
+@@ -1,2 +1,2 @@
  hello world
 -goodbye world
 +goodbye		  	world
@@ -134,7 +134,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,3 @@ hello world
+@@ -1,2 +1,3 @@
  hello world
 + 	
  goodbye world
@@ -142,7 +142,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,3 @@ hello world
+@@ -1,2 +1,3 @@
  hello world
 + 	
  goodbye world
@@ -150,7 +150,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,3 @@ hello world
+@@ -1,2 +1,3 @@
  hello world
 + 	
  goodbye world
@@ -160,7 +160,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,3 @@ hello world
+@@ -1,2 +1,3 @@
 -hello world
 -goodbye world
 +hello  world
@@ -170,7 +170,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,3 @@ hello world
+@@ -1,2 +1,3 @@
 -hello world
 -goodbye world
 +hello  world
@@ -180,7 +180,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,3 @@ hello world
+@@ -1,2 +1,3 @@
 -hello world
 -goodbye world
 +hello  world
@@ -192,7 +192,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,2 @@ hello world
+@@ -1,2 +1,2 @@
 -hello world
 -goodbye world
 +helloworld
@@ -201,7 +201,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,2 @@ hello world
+@@ -1,2 +1,2 @@
 -hello world
 -goodbye world
 +helloworld
@@ -210,7 +210,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,2 @@ hello world
+@@ -1,2 +1,2 @@
 -hello world
 -goodbye world
 +helloworld
@@ -219,7 +219,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,2 @@ hello world
+@@ -1,2 +1,2 @@
 -hello world
 -goodbye world
 +helloworld
@@ -230,7 +230,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,5 @@ hello world
+@@ -1,2 +1,5 @@
 -hello world
 -goodbye world
 +helloworld
@@ -242,7 +242,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,5 @@ hello world
+@@ -1,2 +1,5 @@
 -hello world
 -goodbye world
 +helloworld
@@ -254,7 +254,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,5 @@ hello world
+@@ -1,2 +1,5 @@
 -hello world
 -goodbye world
 +helloworld
@@ -266,7 +266,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,5 @@ hello world
+@@ -1,2 +1,5 @@
 -hello world
 -goodbye world
 +helloworld
@@ -278,7 +278,7 @@
 diff -r 540c40a65b78 foo
 --- a/foo
 +++ b/foo
-@@ -1,2 +1,5 @@ hello world
+@@ -1,2 +1,5 @@
 -hello world
 -goodbye world
 +helloworld
--- a/tests/test-diff-newlines.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-diff-newlines.out	Wed Feb 06 19:57:52 2008 -0800
@@ -2,7 +2,7 @@
 diff -r 107ba6f817b5 -r 310ce7989cdc a
 --- a/a	Thu Jan 01 00:00:01 1970 +0000
 +++ b/a	Thu Jan 01 00:00:02 1970 +0000
-@@ -1,2 +1,3 @@ confuse str.splitlines
+@@ -1,2 +1,3 @@
  confuse str.splitlines
  embedded
newline
 +clean diff
--- a/tests/test-dispatch	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-dispatch	Wed Feb 06 19:57:52 2008 -0800
@@ -11,7 +11,7 @@
 
 echo '% [defaults]'
 hg cat a
-cat > $HGRCPATH <<EOF
+cat >> $HGRCPATH <<EOF
 [defaults]
 cat = -v
 EOF
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-dispatch.py	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,32 @@
+import os
+from mercurial import dispatch
+
+def testdispatch(cmd):
+    """Simple wrapper around dispatch.dispatch()
+
+    Prints command and result value, but does not handle quoting.
+    """
+    print "running: %s" % (cmd,)
+    result = dispatch.dispatch(cmd.split())
+    print "result: %r" % (result,)
+
+
+testdispatch("init test1")
+os.chdir('test1')
+
+# create file 'foo', add and commit
+f = file('foo', 'wb')
+f.write('foo\n')
+f.close()
+testdispatch("add foo")
+testdispatch("commit -m commit1 -d 2000-01-01 foo")
+
+# append to file 'foo' and commit
+f = file('foo', 'ab')
+f.write('bar\n')
+f.close()
+testdispatch("commit -m commit2 -d 2000-01-02 foo")
+
+# check 88803a69b24 (fancyopts modified command table)
+testdispatch("log -r 0")
+testdispatch("log -r tip")
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-dispatch.py.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,23 @@
+running: init test1
+result: None
+running: add foo
+result: 0
+running: commit -m commit1 -d 2000-01-01 foo
+result: None
+running: commit -m commit2 -d 2000-01-02 foo
+result: None
+running: log -r 0
+changeset:   0:0e4634943879
+user:        test
+date:        Sat Jan 01 00:00:00 2000 +0000
+summary:     commit1
+
+result: None
+running: log -r tip
+changeset:   1:45589e459b2e
+tag:         tip
+user:        test
+date:        Sun Jan 02 00:00:00 2000 +0000
+summary:     commit2
+
+result: None
--- a/tests/test-doctest.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-doctest.py	Wed Feb 06 19:57:52 2008 -0800
@@ -5,3 +5,8 @@
 
 doctest.testmod(mercurial.changelog)
 
+import mercurial.httprepo
+doctest.testmod(mercurial.httprepo)
+
+import mercurial.util
+doctest.testmod(mercurial.util)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-double-merge	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,30 @@
+#!/bin/sh
+
+hg init repo
+cd repo
+
+echo line 1 > foo
+hg ci -qAm 'add foo' -d "1000000 0"
+
+# copy foo to bar and change both files
+hg cp foo bar
+echo line 2-1 >> foo
+echo line 2-2 >> bar
+hg ci -m 'cp foo bar; change both' -d "1000000 0"
+
+# in another branch, change foo in a way that doesn't conflict with
+# the other changes
+hg up -qC 0
+echo line 0 > foo
+hg cat foo >> foo
+hg ci -m 'change foo' -d "1000000 0"
+
+# this merge used to get conflicts that shouldn't be there
+hg merge --debug
+
+echo "-- foo --"
+cat foo
+
+echo "-- bar --"
+cat bar
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-double-merge.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,30 @@
+resolving manifests
+ overwrite None partial False
+ ancestor 310fd17130da local 2092631ce82b+ remote 7731dad1c2b9
+  searching for copies back to rev 1
+  unmatched files in other:
+   bar
+  all copies found (* = to merge, ! = divergent):
+   bar -> foo *
+  checking for directory renames
+ foo: versions differ -> m
+ foo: remote copied to bar -> m
+copying foo to bar
+picked tool 'internal:merge' for foo (binary False symlink False)
+merging foo and bar
+my foo@2092631ce82b+ other bar@7731dad1c2b9 ancestor foo@310fd17130da
+ premerge successful
+picked tool 'internal:merge' for foo (binary False symlink False)
+merging foo
+my foo@2092631ce82b+ other foo@7731dad1c2b9 ancestor foo@310fd17130da
+ premerge successful
+0 files updated, 2 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+-- foo --
+line 0
+line 1
+line 2-1
+-- bar --
+line 0
+line 1
+line 2-2
--- a/tests/test-empty-file.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-empty-file.out	Wed Feb 06 19:57:52 2008 -0800
@@ -14,6 +14,6 @@
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 M empty2
-b80de5d138758541c5f05265ad144ab9fa86d1db 644 empty1
-b80de5d138758541c5f05265ad144ab9fa86d1db 644 empty2
-b80de5d138758541c5f05265ad144ab9fa86d1db 644 empty3
+b80de5d138758541c5f05265ad144ab9fa86d1db 644   empty1
+b80de5d138758541c5f05265ad144ab9fa86d1db 644   empty2
+b80de5d138758541c5f05265ad144ab9fa86d1db 644   empty3
--- a/tests/test-excessive-merge.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-excessive-merge.out	Wed Feb 06 19:57:52 2008 -0800
@@ -43,17 +43,17 @@
      4       275      29      3       4 f6c172c6198c 448a8c5e42f1 7c5dc2e857f2
 
 1
-79d7492df40aa0fa093ec4209be78043c181f094 644 a
-2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 b
+79d7492df40aa0fa093ec4209be78043c181f094 644   a
+2ed2a3912a0b24502043eae84ee4b279c18b90dd 644   b
 2
-2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 a
-79d7492df40aa0fa093ec4209be78043c181f094 644 b
+2ed2a3912a0b24502043eae84ee4b279c18b90dd 644   a
+79d7492df40aa0fa093ec4209be78043c181f094 644   b
 3
-79d7492df40aa0fa093ec4209be78043c181f094 644 a
-79d7492df40aa0fa093ec4209be78043c181f094 644 b
+79d7492df40aa0fa093ec4209be78043c181f094 644   a
+79d7492df40aa0fa093ec4209be78043c181f094 644   b
 4
-79d7492df40aa0fa093ec4209be78043c181f094 644 a
-79d7492df40aa0fa093ec4209be78043c181f094 644 b
+79d7492df40aa0fa093ec4209be78043c181f094 644   a
+79d7492df40aa0fa093ec4209be78043c181f094 644   b
 
    rev    offset  length   base linkrev nodeid       p1           p2
      0         0       5      0       0 2ed2a3912a0b 000000000000 000000000000
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-execute-bit	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+"$TESTDIR/hghave" execbit || exit 80
+
+hg init
+echo a > a
+hg ci -d'0 0' -Am'not executable'
+
+chmod +x a
+hg ci -d'1 0' -m'executable'
+hg id
+
+hg up 0
+hg id
+test -x a && echo executable -- eek || echo not executable -- whew
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-execute-bit.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,5 @@
+adding a
+1549299e88d1 tip
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+d69afc33ff8a
+not executable -- whew
--- a/tests/test-extdiff	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-extdiff	Wed Feb 06 19:57:52 2008 -0800
@@ -6,7 +6,9 @@
 hg init a
 cd a
 echo a > a
+echo b > b
 hg add
+# should diff cloned directories
 hg extdiff -o -r $opt
 
 echo "[extdiff]" >> $HGRCPATH
@@ -22,13 +24,17 @@
 echo b >> a
 hg ci -d '1 0' -mtest2
 
+# should diff cloned files directly
 hg falabala -r 0:1
 
 # test diff during merge
 hg update 0
-echo b >> b
-hg add b
+echo c >> c
+hg add c
 hg ci -m "new branch" -d '1 0'
 hg update -C 1
 hg merge tip
-hg falabala || echo "diff-like tools yield a non-zero exit code"
+# should diff cloned file against wc file
+hg falabala > out || echo "diff-like tools yield a non-zero exit code"
+# clean up the output since the wc is a tmp directory
+sed  's:\(.* \).*\(\/test-extdiff\):\1[tmp]\2:' out
--- a/tests/test-extdiff.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-extdiff.out	Wed Feb 06 19:57:52 2008 -0800
@@ -1,9 +1,7 @@
 adding a
-making snapshot of 0 files from rev 000000000000
-making snapshot of 1 files from working dir
+adding b
 Only in a: a
-making snapshot of 0 files from rev 000000000000
-making snapshot of 1 files from working dir
+Only in a: b
 diffing a.000000000000 a
 hg falabala [OPTION]... [FILE]...
 
@@ -26,14 +24,10 @@
  -X --exclude  exclude names matching the given patterns
 
 use "hg -v help falabala" to show global options
-making snapshot of 1 files from rev e27a2475d60a
-making snapshot of 1 files from rev 5e49ec8d3f05
-diffing a.e27a2475d60a a.5e49ec8d3f05
+diffing a.8a5febb7f867/a a.34eed99112ab/a
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
-making snapshot of 1 files from rev 5e49ec8d3f05
-making snapshot of 1 files from working dir
-diffing a.5e49ec8d3f05 a
 diff-like tools yield a non-zero exit code
+diffing a.34eed99112ab/c [tmp]/test-extdiff/a/c
--- a/tests/test-extension	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-extension	Wed Feb 06 19:57:52 2008 -0800
@@ -47,12 +47,13 @@
 hg clone a b
 
 hg bar
+echo 'foobar = !' >> $HGRCPATH
 
 echo '% module/__init__.py-style'
-echo '[extensions]' > $HGRCPATH
 echo "barfoo = $barfoopath" >> $HGRCPATH
 cd a
 hg foo
+echo 'barfoo = !' >> $HGRCPATH
 
 cd ..
 cat > empty.py <<EOF
@@ -61,9 +62,9 @@
 cmdtable = {}
 EOF
 emptypath=`pwd`/empty.py
-echo '[extensions]' > $HGRCPATH
 echo "empty = $emptypath" >> $HGRCPATH
 hg help empty
+echo 'empty = !' >> $HGRCPATH
 
 cat > debugextension.py <<EOF
 '''only debugcommands
@@ -75,7 +76,7 @@
 cmdtable = {"debugfoobar": (debugfoobar, (), "hg debugfoobar")}
 EOF
 debugpath=`pwd`/debugextension.py
-echo '[extensions]' > $HGRCPATH
 echo "debugextension = $debugpath" >> $HGRCPATH
 hg help debugextension
 hg --debug help debugextension
+echo 'debugextension = !' >> $HGRCPATH
--- a/tests/test-fetch	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-fetch	Wed Feb 06 19:57:52 2008 -0800
@@ -20,5 +20,20 @@
 
 echo c > c/c
 hg --cwd c commit -d '3 0' -Amc
+
+hg clone c d
+hg clone c e
+
 hg --cwd c fetch -d '4 0' -m 'automated merge' ../a
 ls c
+
+hg --cwd a serve -a localhost -p $HGPORT -d --pid-file=hg.pid
+cat a/hg.pid >> "$DAEMON_PIDS"
+
+echo '% fetch over http, no auth'
+hg --cwd d fetch -d '5 0' http://localhost:$HGPORT/
+hg --cwd d tip --template '{desc}\n'
+
+echo '% fetch over http with auth (should be hidden in desc)'
+hg --cwd e fetch -d '5 0' http://user:password@localhost:$HGPORT/
+hg --cwd e tip --template '{desc}\n'
--- a/tests/test-fetch.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-fetch.out	Wed Feb 06 19:57:52 2008 -0800
@@ -13,6 +13,8 @@
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 1:97d72e5f12c7
 adding c
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
 pulling from ../a
 searching for changes
 adding changesets
@@ -25,3 +27,25 @@
 a
 b
 c
+% fetch over http, no auth
+pulling from http://localhost:20059/
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files (+1 heads)
+merging with new head 2:97d72e5f12c7
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+new changeset 3:0b6439e938f9 merges remote changes with local
+Automated merge with http://localhost:20059/
+% fetch over http with auth (should be hidden in desc)
+pulling from http://user:***@localhost:20059/
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files (+1 heads)
+merging with new head 2:97d72e5f12c7
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+new changeset 3:0b6439e938f9 merges remote changes with local
+Automated merge with http://localhost:20059/
--- a/tests/test-filebranch	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-filebranch	Wed Feb 06 19:57:52 2008 -0800
@@ -37,7 +37,7 @@
 hg commit -m "branch b" -d "1000000 0"
 
 echo "we shouldn't have anything but n state here"
-hg debugstate | cut -b 1-16,35-
+hg debugstate | cut -b 1-16,37-
 
 echo merging
 hg pull ../a
@@ -48,7 +48,7 @@
 echo new > quux
 
 echo "we shouldn't have anything but foo in merge state here"
-hg debugstate | cut -b 1-16,35- | grep "^m"
+hg debugstate | cut -b 1-16,37- | grep "^m"
 
 hg ci -m "merge" -d "1000000 0"
 
--- a/tests/test-filebranch.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-filebranch.out	Wed Feb 06 19:57:52 2008 -0800
@@ -60,10 +60,10 @@
      0         0       3      0       0 b8e02f643373 000000000000 000000000000
      1         3       5      1       3 6128c0f33108 b8e02f643373 000000000000
 manifest entries should match tips of all files
-33d1fb69067a0139622a3fa3b7ba1cdb1367972e 644 bar
-2ffeddde1b65b4827f6746174a145474129fa2ce 644 baz
-aa27919ee4303cfd575e1fb932dd64d75aa08be4 644 foo
-6128c0f33108e8cfbb4e0824d13ae48b466d7280 644 quux
+33d1fb69067a0139622a3fa3b7ba1cdb1367972e 644   bar
+2ffeddde1b65b4827f6746174a145474129fa2ce 644   baz
+aa27919ee4303cfd575e1fb932dd64d75aa08be4 644   foo
+6128c0f33108e8cfbb4e0824d13ae48b466d7280 644   quux
 everything should be clean now
 checking changesets
 checking manifests
--- a/tests/test-git-export	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-git-export	Wed Feb 06 19:57:52 2008 -0800
@@ -78,6 +78,8 @@
 hg ci -m 'mv dst2 dst3; revert start' -d '0 0'
 
 hg diff --git -r 9:11
+echo '%  reversed'
+hg diff --git -r 11:9
 
 echo a >> foo
 hg add foo
@@ -92,12 +94,18 @@
 echo
 echo '% file created before r1 and renamed before r2'
 hg diff --git -r -3:-1
+echo '%  reversed'
+hg diff --git -r -1:-3
 echo
 echo '% file created in r1 and renamed before r2'
 hg diff --git -r -4:-1
+echo '%  reversed'
+hg diff --git -r -1:-4
 echo
 echo '% file created after r1 and renamed before r2'
 hg diff --git -r -5:-1
+echo '%  reversed'
+hg diff --git -r -1:-5
 
 echo
 echo '% comparing with the working dir'
@@ -139,6 +147,8 @@
 hg mv brand-new2 brand-new3-2
 hg ci -m 'multiple renames/copies'
 hg diff --git -r -2 -r -1
+echo '%  reversed'
+hg diff --git -r -1 -r -2
 
 echo '% there should be a trailing TAB if there are spaces in the file name'
 echo foo > 'with spaces'
--- a/tests/test-git-export.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-git-export.out	Wed Feb 06 19:57:52 2008 -0800
@@ -33,9 +33,9 @@
 new mode 100644
 rename from src
 rename to dst
---- a/dst
+--- a/src
 +++ b/dst
-@@ -3,3 +3,4 @@ 3
+@@ -3,3 +3,4 @@
  3
  4
  5
@@ -75,28 +75,52 @@
 diff --git a/dst2 b/dst3
 rename from dst2
 rename to dst3
+%  reversed
+diff --git a/dst3 b/dst2
+rename from dst3
+rename to dst2
 
 % file created before r1 and renamed before r2
 diff --git a/foo b/bar
 rename from foo
 rename to bar
---- a/bar
+--- a/foo
 +++ b/bar
-@@ -1,2 +1,3 @@ a
+@@ -1,2 +1,3 @@
  a
  b
 +c
+%  reversed
+diff --git a/bar b/foo
+rename from bar
+rename to foo
+--- a/bar
++++ b/foo
+@@ -1,3 +1,2 @@
+ a
+ b
+-c
 
 % file created in r1 and renamed before r2
 diff --git a/foo b/bar
 rename from foo
 rename to bar
---- a/bar
+--- a/foo
 +++ b/bar
-@@ -1,1 +1,3 @@ a
+@@ -1,1 +1,3 @@
  a
 +b
 +c
+%  reversed
+diff --git a/bar b/foo
+rename from bar
+rename to foo
+--- a/bar
++++ b/foo
+@@ -1,3 +1,1 @@
+ a
+-b
+-c
 
 % file created after r1 and renamed before r2
 diff --git a/bar b/bar
@@ -107,6 +131,15 @@
 +a
 +b
 +c
+%  reversed
+diff --git a/bar b/bar
+deleted file mode 100644
+--- a/bar
++++ /dev/null
+@@ -1,3 +0,0 @@
+-a
+-b
+-c
 
 % comparing with the working dir
 % there's a copy in the working dir...
@@ -145,6 +178,16 @@
 diff --git a/brand-new2 b/brand-new3-2
 copy from brand-new2
 copy to brand-new3-2
+%  reversed
+diff --git a/brand-new3 b/brand-new2
+rename from brand-new3
+rename to brand-new2
+diff --git a/brand-new3-2 b/brand-new3-2
+deleted file mode 100644
+--- a/brand-new3-2
++++ /dev/null
+@@ -1,1 +0,0 @@
+-
 % there should be a trailing TAB if there are spaces in the file name
 diff --git a/with spaces b/with spaces
 new file mode 100644
--- a/tests/test-git-import	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-git-import	Wed Feb 06 19:57:52 2008 -0800
@@ -44,8 +44,12 @@
 copy to copyx
 EOF
 
-test -f copy -a ! -x copy || echo failed
-test -x copyx || echo failed
+if "$TESTDIR/hghave" -q execbit; then
+    test -f copy -a ! -x copy || echo failed
+    test -x copyx || echo failed
+else
+    test -f copy || echo failed
+fi
 cat copy
 hg cat copy
 
@@ -194,3 +198,19 @@
 EOF
 cat "foo bar"
 
+echo % copy then modify the original file
+hg import -m copy-mod-orig - <<EOF
+diff --git a/foo2 b/foo2
+index 257cc56..fe08ec6 100644
+--- a/foo2
++++ b/foo2
+@@ -1 +1,2 @@
+ foo
++new line
+diff --git a/foo2 b/foo3
+similarity index 100%
+copy from foo2
+copy to foo3
+EOF
+
+cat foo3
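
[Editor's note] Patches in this git extended format (copy from/copy to plus regular hunks) are what hg diff --git itself emits, as exercised in tests/test-git-export; a minimal sketch of producing an equivalent patch by hand, with the same illustrative file names:

    hg cp foo2 foo3
    echo "new line" >> foo2
    hg diff --git > copy-mod-orig.patch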
--- a/tests/test-git-import.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-git-import.out	Wed Feb 06 19:57:52 2008 -0800
@@ -55,11 +55,14 @@
 % binary files and regular patch hunks
 applying patch from stdin
 foo
-045c85ba38952325e126c70962cc0f9d9077bc67 644 binary
+045c85ba38952325e126c70962cc0f9d9077bc67 644   binary
 % many binary files
 applying patch from stdin
-045c85ba38952325e126c70962cc0f9d9077bc67 644 mbinary1
-a874b471193996e7cb034bb301cac7bdaf3e3f46 644 mbinary2
+045c85ba38952325e126c70962cc0f9d9077bc67 644   mbinary1
+a874b471193996e7cb034bb301cac7bdaf3e3f46 644   mbinary2
 % filenames with spaces
 applying patch from stdin
 foo
+% copy then modify the original file
+applying patch from stdin
+foo
--- a/tests/test-globalopts.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-globalopts.out	Wed Feb 06 19:57:52 2008 -0800
@@ -147,6 +147,7 @@
  annotate     show changeset information per file line
  archive      create unversioned archive of a repository revision
  backout      reverse effect of earlier changeset
+ bisect       subdivision search of changesets
  branch       set or show the current branch name
  branches     list repository named branches
  bundle       create a changegroup file
@@ -175,8 +176,8 @@
  recover      roll back an interrupted transaction
  remove       remove the specified files on the next commit
  rename       rename files; equivalent of copy + remove
- revert       revert files or dirs to their states as of some revision
- rollback     roll back the last transaction in this repository
+ revert       restore individual files or dirs to an earlier state
+ rollback     roll back the last transaction
  root         print the root (top) of the current working dir
  serve        export the repository via HTTP
  showconfig   show combined config settings from all hgrc files
@@ -199,6 +200,7 @@
  annotate     show changeset information per file line
  archive      create unversioned archive of a repository revision
  backout      reverse effect of earlier changeset
+ bisect       subdivision search of changesets
  branch       set or show the current branch name
  branches     list repository named branches
  bundle       create a changegroup file
@@ -227,8 +229,8 @@
  recover      roll back an interrupted transaction
  remove       remove the specified files on the next commit
  rename       rename files; equivalent of copy + remove
- revert       revert files or dirs to their states as of some revision
- rollback     roll back the last transaction in this repository
+ revert       restore individual files or dirs to an earlier state
+ rollback     roll back the last transaction
  root         print the root (top) of the current working dir
  serve        export the repository via HTTP
  showconfig   show combined config settings from all hgrc files
--- a/tests/test-glog	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-glog	Wed Feb 06 19:57:52 2008 -0800
@@ -139,5 +139,8 @@
 echo % glog
 hg glog
 
+echo % file glog
+hg glog 5
+
 echo % unused arguments
-hg glog -q foo || echo failed
+hg glog -q foo bar || echo failed
--- a/tests/test-glog.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-glog.out	Wed Feb 06 19:57:52 2008 -0800
@@ -307,9 +307,17 @@
    date:        Thu Jan 01 00:00:00 1970 +0000
    summary:     (0) root
 
+% file glog
+o  changeset:   5:3589c3c477ab
+   parent:      3:02173ffbf857
+   parent:      4:e2cad8233c77
+   user:        test
+   date:        Thu Jan 01 00:00:05 1970 +0000
+   summary:     (5) expand
+
 % unused arguments
 hg glog: invalid arguments
-hg glog [OPTION]...
+hg glog [OPTION]... [FILE]
 
 show revision history alongside an ASCII revision graph
 failed
--- a/tests/test-grep	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-grep	Wed Feb 06 19:57:52 2008 -0800
@@ -52,3 +52,17 @@
 hg ci -m 3 -d '0 0'
 hg grep orange
 hg grep --all orange
+
+# Got a traceback when using grep on a single
+# revision with renamed files.
+cd ..
+echo % issue 685
+hg init issue685
+cd issue685
+echo octarine > color
+hg ci -Amcolor
+hg rename color colour
+hg ci -Am rename
+hg grep octarine
+# Used to crash here
+hg grep -r 1 octarine
--- a/tests/test-grep.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-grep.out	Wed Feb 06 19:57:52 2008 -0800
@@ -32,3 +32,8 @@
 color:3:+:orange
 color:2:-:orange
 color:1:+:orange
+% issue 685
+adding color
+color:0:octarine
+colour:1:octarine
+colour:1:octarine
--- a/tests/test-help.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-help.out	Wed Feb 06 19:57:52 2008 -0800
@@ -15,7 +15,6 @@
  pull       pull changes from the specified source
  push       push changes to the specified destination
  remove     remove the specified files on the next commit
- revert     revert files or dirs to their states as of some revision
  serve      export the repository via HTTP
  status     show changed files in the working directory
  update     update working directory
@@ -34,7 +33,6 @@
  pull       pull changes from the specified source
  push       push changes to the specified destination
  remove     remove the specified files on the next commit
- revert     revert files or dirs to their states as of some revision
  serve      export the repository via HTTP
  status     show changed files in the working directory
  update     update working directory
@@ -47,6 +45,7 @@
  annotate     show changeset information per file line
  archive      create unversioned archive of a repository revision
  backout      reverse effect of earlier changeset
+ bisect       subdivision search of changesets
  branch       set or show the current branch name
  branches     list repository named branches
  bundle       create a changegroup file
@@ -75,8 +74,8 @@
  recover      roll back an interrupted transaction
  remove       remove the specified files on the next commit
  rename       rename files; equivalent of copy + remove
- revert       revert files or dirs to their states as of some revision
- rollback     roll back the last transaction in this repository
+ revert       restore individual files or dirs to an earlier state
+ rollback     roll back the last transaction
  root         print the root (top) of the current working dir
  serve        export the repository via HTTP
  showconfig   show combined config settings from all hgrc files
@@ -95,6 +94,7 @@
  annotate     show changeset information per file line
  archive      create unversioned archive of a repository revision
  backout      reverse effect of earlier changeset
+ bisect       subdivision search of changesets
  branch       set or show the current branch name
  branches     list repository named branches
  bundle       create a changegroup file
@@ -123,8 +123,8 @@
  recover      roll back an interrupted transaction
  remove       remove the specified files on the next commit
  rename       rename files; equivalent of copy + remove
- revert       revert files or dirs to their states as of some revision
- rollback     roll back the last transaction in this repository
+ revert       restore individual files or dirs to an earlier state
+ rollback     roll back the last transaction
  root         print the root (top) of the current working dir
  serve        export the repository via HTTP
  showconfig   show combined config settings from all hgrc files
@@ -212,11 +212,15 @@
 use "hg -v help diff" to show global options
 hg status [OPTION]... [FILE]...
 
+aliases: st
+
 show changed files in the working directory
 
     Show status of files in the repository.  If names are given, only
-    files that match are shown.  Files that are clean or ignored, are
-    not listed unless -c (clean), -i (ignored) or -A is given.
+    files that match are shown.  Files that are clean or ignored or
+    source of a copy/move operation, are not listed unless -c (clean),
+    -i (ignored), -C (copies) or -A is given.  Unless options described
+    with "show only ..." are given, the options -mardu are used.
 
     NOTE: status may appear to disagree with diff if permissions have
     changed or a merge has occurred. The standard diff format does not
@@ -233,11 +237,9 @@
     C = clean
     ! = deleted, but still tracked
     ? = not tracked
-    I = ignored (not shown by default)
+    I = ignored
       = the previous added file was copied from here
 
-aliases: st
-
 options:
 
  -A --all        show status of all files
@@ -277,7 +279,6 @@
  pull       pull changes from the specified source
  push       push changes to the specified destination
  remove     remove the specified files on the next commit
- revert     revert files or dirs to their states as of some revision
  serve      export the repository via HTTP
  status     show changed files in the working directory
  update     update working directory
@@ -301,7 +302,6 @@
  pull       pull changes from the specified source
  push       push changes to the specified destination
  remove     remove the specified files on the next commit
- revert     revert files or dirs to their states as of some revision
  serve      export the repository via HTTP
  status     show changed files in the working directory
  update     update working directory
--- a/tests/test-hgignore	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-hgignore	Wed Feb 06 19:57:52 2008 -0800
@@ -40,12 +40,15 @@
 echo ".*\.o" > .hgignore
 echo "--" ; hg status
 
-# XXX: broken
-#echo "glob:**.o" > .hgignore
-#echo "--" ; hg status
-#
-#echo "glob:*.o" > .hgignore
-#echo "--" ; hg status
+echo "glob:**.o" > .hgignore
+echo "--" ; hg status
+
+echo "glob:*.o" > .hgignore
+echo "--" ; hg status
+
+echo "syntax: glob" > .hgignore
+echo "re:.*\.o" >> .hgignore
+echo "--" ; hg status
 
 echo "syntax: invalid" > .hgignore
 echo "--" ; hg status 2>&1 | sed -e 's/.*\.hgignore:/.hgignore:/'
--- a/tests/test-hgignore.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-hgignore.out	Wed Feb 06 19:57:52 2008 -0800
@@ -14,6 +14,21 @@
 ? a.c
 ? syntax
 --
+A dir/b.o
+? .hgignore
+? a.c
+? syntax
+--
+A dir/b.o
+? .hgignore
+? a.c
+? syntax
+--
+A dir/b.o
+? .hgignore
+? a.c
+? syntax
+--
 .hgignore: ignoring invalid syntax 'invalid'
 A dir/b.o
 ? .hgignore
--- a/tests/test-hgweb	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-hgweb	Wed Feb 06 19:57:52 2008 -0800
@@ -1,4 +1,5 @@
 #!/bin/sh
+# Some tests for hgweb. Tests static files, plain files and different 404's.
 
 hg init test
 cd test
@@ -6,8 +7,36 @@
 echo foo > da/foo
 echo foo > foo
 hg ci -Ambase -d '0 0'
-hg serve -p 20060 -d --pid-file=hg.pid
+hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
+cat hg.pid >> $DAEMON_PIDS
 echo % manifest
-("$TESTDIR/get-with-headers.py" localhost:20060 '/file/tip/?style=raw')
-("$TESTDIR/get-with-headers.py" localhost:20060 '/file/tip/da?style=raw')
+("$TESTDIR/get-with-headers.py" localhost:$HGPORT '/file/tip/?style=raw')
+("$TESTDIR/get-with-headers.py" localhost:$HGPORT '/file/tip/da?style=raw')
+
+echo % plain file
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/file/tip/foo?style=raw'
+
+echo % should give a 404 - static file that does not exist
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/static/bogus'
+
+echo % should give a 404 - bad revision
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/file/spam/foo?style=raw'
+
+echo % should give a 400 - bad command
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/file/tip/foo?cmd=spam&style=raw' | sed 's/400.*/400/'
+
+echo % should give a 404 - file does not exist
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/file/tip/bork?style=raw'
+
+echo % stop and restart
 kill `cat hg.pid`
+hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log
+cat hg.pid >> $DAEMON_PIDS
+# Test the access/error files are opened in append mode
+python -c "print len(file('access.log').readlines()), 'log lines written'"
+
+echo % static file
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/static/style-gitweb.css'
+
+echo % errors
+cat errors.log
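
The test above talks to hg serve through the $TESTDIR/get-with-headers.py helper. A short sketch, assuming that helper roughly performs an HTTP GET and prints the status line and body (its exact output format is not shown here), written with Python 2's httplib to match the era of these scripts:

import os
import httplib

def get_with_headers(host, path):
    # roughly what the tests' get-with-headers.py helper is assumed
    # to do: issue a GET and print the status and the response body
    conn = httplib.HTTPConnection(host)
    conn.request('GET', path)
    resp = conn.getresponse()
    print resp.status, resp.reason
    print
    print resp.read()
    conn.close()

# HGPORT is set by the test harness; 8000 is only a fallback here
get_with_headers('localhost:' + os.environ.get('HGPORT', '8000'),
                 '/file/tip/?style=raw')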
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-hgweb-commands	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,54 @@
+#!/bin/sh
+# An attempt at more fully testing the hgweb web interface.
+# The following things are tested elsewhere and are therefore omitted:
+# - archive, tested in test-archive
+# - unbundle, tested in test-push-http
+# - changegroupsubset, tested in test-pull
+
+echo % Set up the repo
+hg init test
+cd test
+mkdir da
+echo foo > da/foo
+echo foo > foo
+hg ci -d'0 0' -Ambase
+hg tag 1.0
+hg serve -n test -p $HGPORT -d --pid-file=hg.pid -E errors.log
+cat hg.pid >> $DAEMON_PIDS
+
+echo % Logs and changes
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/?style=atom' | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//"
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/1/?style=atom' | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//"
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/1/foo/?style=atom' | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//"
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/shortlog/' | sed "s/[0-9]* years/many years/"
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/rev/1/?style=raw'
+
+echo % File-related
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/foo/?style=raw'
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/annotate/1/foo/?style=raw'
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/?style=raw'
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/foo' | sed "s/[0-9]* years/many years/"
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/filediff/1/foo/?style=raw'
+
+echo % Overviews
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/tags/?style=atom' | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//"
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/summary/?style=gitweb' | sed "s/[0-9]* years ago/long ago/g"
+
+echo % capabilities
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/capabilities'
+echo % heads
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/heads'
+echo % lookup
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/lookup/1'
+echo % branches
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/branches'
+echo % changegroup
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/changegroup'
+echo % stream_out
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/stream_out'
+
+echo % Static files
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/static/style.css'
+
+echo % ERRORS ENCOUNTERED
+cat errors.log
Binary file tests/test-hgweb-commands.out has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-hgweb-no-request-uri	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,77 @@
+#!/bin/sh
+# This tests if hgweb and hgwebdir still work if the REQUEST_URI variable is
+# no longer passed with the request. Instead, SCRIPT_NAME and PATH_INFO
+# should be used from d74fc8dec2b4 onward to route the request.
+
+mkdir repo
+cd repo
+hg init
+echo foo > bar
+hg add bar
+hg commit -m "test" -d "0 0" -u "Testing"
+hg tip
+
+cat > request.py <<EOF
+from mercurial.hgweb import hgweb, hgwebdir
+from StringIO import StringIO
+import os, sys
+
+errors = StringIO()
+input = StringIO()
+
+def startrsp(headers, data):
+	print '---- HEADERS'
+	print headers
+	print '---- DATA'
+	print data
+	return output.write
+
+env = {
+	'wsgi.version': (1, 0),
+	'wsgi.url_scheme': 'http',
+	'wsgi.errors': errors,
+	'wsgi.input': input,
+	'wsgi.multithread': False,
+	'wsgi.multiprocess': False,
+	'wsgi.run_once': False,
+	'REQUEST_METHOD': 'GET',
+	'SCRIPT_NAME': '',
+	'SERVER_NAME': '127.0.0.1',
+	'SERVER_PORT': os.environ['HGPORT'],
+	'SERVER_PROTOCOL': 'HTTP/1.0'
+}
+
+output = StringIO()
+env['PATH_INFO'] = '/'
+env['QUERY_STRING'] = 'style=atom'
+hgweb('.', name = 'repo')(env, startrsp)
+print output.getvalue()
+print '---- ERRORS'
+print errors.getvalue()
+
+output = StringIO()
+env['PATH_INFO'] = '/file/tip/'
+env['QUERY_STRING'] = 'style=raw'
+hgweb('.', name = 'repo')(env, startrsp)
+print output.getvalue()
+print '---- ERRORS'
+print errors.getvalue()
+
+output = StringIO()
+env['PATH_INFO'] = '/'
+env['QUERY_STRING'] = 'style=raw'
+hgwebdir({'repo': '.'})(env, startrsp)
+print output.getvalue()
+print '---- ERRORS'
+print errors.getvalue()
+
+output = StringIO()
+env['PATH_INFO'] = '/repo/file/tip/'
+env['QUERY_STRING'] = 'style=raw'
+hgwebdir({'repo': '.'})(env, startrsp)
+print output.getvalue()
+print '---- ERRORS'
+print errors.getvalue()
+EOF
+
+python request.py | sed "s/http:\/\/127\.0\.0\.1:[0-9]*\//http:\/\/127.0.0.1\//"
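
request.py above deliberately omits REQUEST_URI from the WSGI environment. A minimal sketch of the general WSGI convention the test's comment refers to, reconstructing the effective path from SCRIPT_NAME and PATH_INFO alone; this mirrors the spec, not hgweb's actual routing code:

def effective_path(environ):
    # per the WSGI spec the request path is SCRIPT_NAME + PATH_INFO;
    # REQUEST_URI is only an optional, server-specific extra
    path = environ.get('SCRIPT_NAME', '') + environ.get('PATH_INFO', '')
    query = environ.get('QUERY_STRING', '')
    if query:
        return path + '?' + query
    return path

env = {'SCRIPT_NAME': '', 'PATH_INFO': '/repo/file/tip/',
       'QUERY_STRING': 'style=raw'}
print effective_path(env)   # prints /repo/file/tip/?style=raw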
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-hgweb-no-request-uri.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,72 @@
+changeset:   0:4cbec7e6f8c4
+tag:         tip
+user:        Testing
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     test
+
+---- HEADERS
+200 Script output follows
+---- DATA
+[('Content-Type', 'application/atom+xml; charset=ascii')]
+<?xml version="1.0" encoding="ascii"?>
+<feed xmlns="http://www.w3.org/2005/Atom">
+ <!-- Changelog -->
+ <id>http://127.0.0.1/</id>
+ <link rel="self" href="http://127.0.0.1/atom-log"/>
+ <link rel="alternate" href="http://127.0.0.1/"/>
+ <title>repo Changelog</title>
+ <updated>1970-01-01T00:00:00+00:00</updated>
+
+ <entry>
+  <title>test</title>
+  <id>http://www.selenic.com/mercurial/#changeset-4cbec7e6f8c42eb52b6b52670e1f7560ae9a101e</id>
+  <link href="http://127.0.0.1/rev/4cbec7e6f8c42eb52b6b52670e1f7560ae9a101e"/>
+  <author>
+   <name>Testing</name>
+   <email>&#84;&#101;&#115;&#116;&#105;&#110;&#103;</email>
+  </author>
+  <updated>1970-01-01T00:00:00+00:00</updated>
+  <published>1970-01-01T00:00:00+00:00</published>
+  <content type="xhtml">
+   <div xmlns="http://www.w3.org/1999/xhtml">
+    <pre xml:space="preserve">test</pre>
+   </div>
+  </content>
+ </entry>
+
+</feed>
+
+---- ERRORS
+
+---- HEADERS
+200 Script output follows
+---- DATA
+[('Content-Type', 'text/plain; charset=ascii')]
+
+-rw-r--r-- 4 bar
+
+
+
+---- ERRORS
+
+---- HEADERS
+200 Script output follows
+---- DATA
+[('Content-Type', 'text/plain; charset=ascii')]
+
+/repo/
+
+
+---- ERRORS
+
+---- HEADERS
+200 Script output follows
+---- DATA
+[('Content-Type', 'text/plain; charset=ascii')]
+
+-rw-r--r-- 4 bar
+
+
+
+---- ERRORS
+
--- a/tests/test-hgweb.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-hgweb.out	Wed Feb 06 19:57:52 2008 -0800
@@ -14,3 +14,126 @@
 -rw-r--r-- 4 foo
 
 
+% plain file
+200 Script output follows
+
+foo
+% should give a 404 - static file that does not exist
+404 Not Found
+
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+<head>
+<link rel="icon" href="/static/hgicon.png" type="image/png" />
+<meta name="robots" content="index, nofollow" />
+<link rel="stylesheet" href="/static/style.css" type="text/css" />
+
+<title>Mercurial Error</title>
+</head>
+<body>
+
+<h2>Mercurial Error</h2>
+
+<p>
+An error occurred while processing your request:
+</p>
+<p>
+Not Found
+</p>
+
+
+<div class="logo">
+powered by<br/>
+<a href="http://www.selenic.com/mercurial/">mercurial</a>
+</div>
+
+</body>
+</html>
+
+% should give a 404 - bad revision
+404 Not Found
+
+
+error: revision not found: spam
+% should give a 400 - bad command
+400
+
+
+error: No such method: spam
+% should give a 404 - file does not exist
+404 Not Found
+
+
+error: Path not found: bork/
+% stop and restart
+7 log lines written
+% static file
+200 Script output follows
+
+body { font-family: sans-serif; font-size: 12px; margin:0px; border:solid #d9d8d1; border-width:1px; margin:10px; }
+a { color:#0000cc; }
+a:hover, a:visited, a:active { color:#880000; }
+div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; }
+div.page_header a:visited { color:#0000cc; }
+div.page_header a:hover { color:#880000; }
+div.page_nav { padding:8px; }
+div.page_nav a:visited { color:#0000cc; }
+div.page_path { padding:8px; border:solid #d9d8d1; border-width:0px 0px 1px}
+div.page_footer { padding:4px 8px; background-color: #d9d8d1; }
+div.page_footer_text { float:left; color:#555555; font-style:italic; }
+div.page_body { padding:8px; }
+div.title, a.title {
+	display:block; padding:6px 8px;
+	font-weight:bold; background-color:#edece6; text-decoration:none; color:#000000;
+}
+a.title:hover { background-color: #d9d8d1; }
+div.title_text { padding:6px 0px; border: solid #d9d8d1; border-width:0px 0px 1px; }
+div.log_body { padding:8px 8px 8px 150px; }
+.age { white-space:nowrap; }
+span.age { position:relative; float:left; width:142px; font-style:italic; }
+div.log_link {
+	padding:0px 8px;
+	font-size:10px; font-family:sans-serif; font-style:normal;
+	position:relative; float:left; width:136px;
+}
+div.list_head { padding:6px 8px 4px; border:solid #d9d8d1; border-width:1px 0px 0px; font-style:italic; }
+a.list { text-decoration:none; color:#000000; }
+a.list:hover { text-decoration:underline; color:#880000; }
+table { padding:8px 4px; }
+th { padding:2px 5px; font-size:12px; text-align:left; }
+tr.light:hover, .parity0:hover { background-color:#edece6; }
+tr.dark, .parity1 { background-color:#f6f6f0; }
+tr.dark:hover, .parity1:hover { background-color:#edece6; }
+td { padding:2px 5px; font-size:12px; vertical-align:top; }
+td.link { padding:2px 5px; font-family:sans-serif; font-size:10px; }
+div.pre { font-family:monospace; font-size:12px; white-space:pre; }
+div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; }
+div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; }
+div.search { margin:4px 8px; position:absolute; top:56px; right:12px }
+.linenr { color:#999999; text-decoration:none }
+a.rss_logo {
+	float:right; padding:3px 6px; line-height:10px;
+	border:1px solid; border-color:#fcc7a5 #7d3302 #3e1a01 #ff954e;
+	color:#ffffff; background-color:#ff6600;
+	font-weight:bold; font-family:sans-serif; font-size:10px;
+	text-align:center; text-decoration:none;
+}
+a.rss_logo:hover { background-color:#ee5500; }
+pre { margin: 0; }
+span.logtags span {
+	padding: 0px 4px;
+	font-size: 10px;
+	font-weight: normal;
+	border: 1px solid;
+	background-color: #ffaaff;
+	border-color: #ffccff #ff00ee #ff00ee #ffccff;
+}
+span.logtags span.tagtag {
+	background-color: #ffffaa;
+	border-color: #ffffcc #ffee00 #ffee00 #ffffcc;
+}
+span.logtags span.branchtag {
+	background-color: #aaffaa;
+	border-color: #ccffcc #00cc33 #00cc33 #ccffcc;
+}
+% errors
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-hgwebdir	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,84 @@
+#!/bin/sh
+# Tests some basic hgwebdir functionality. Tests setting up paths and
+# collection, different forms of 404s and the subdirectory support.
+
+mkdir webdir
+cd webdir
+
+hg init a
+echo a > a/a
+hg --cwd a ci -Ama -d'1 0'
+
+hg init b
+echo b > b/b
+hg --cwd b ci -Amb -d'2 0'
+
+hg init c
+echo c > c/c
+hg --cwd c ci -Amc -d'3 0'
+root=`pwd`
+
+cd ..
+
+cat > paths.conf <<EOF
+[paths]
+a=$root/a
+b=$root/b
+EOF
+
+hg serve -p $HGPORT -d --pid-file=hg.pid --webdir-conf paths.conf \
+    -A access-paths.log -E error-paths-1.log
+cat hg.pid >> $DAEMON_PIDS
+
+echo % should give a 404 - file does not exist
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/a/file/tip/bork?style=raw'
+
+echo % should succeed
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/?style=raw'
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/a/file/tip/a?style=raw'
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/b/file/tip/b?style=raw'
+
+echo % should give a 404 - repo is not published
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/c/file/tip/c?style=raw'
+
+cat > paths.conf <<EOF
+[paths]
+t/a/=$root/a
+b=$root/b
+EOF
+
+hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \
+    -A access-paths.log -E error-paths-2.log
+cat hg.pid >> $DAEMON_PIDS
+
+echo % should succeed, slashy names
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/?style=raw'
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/t?style=raw'
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/t/?style=raw'
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/t/a?style=atom' \
+	| sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//"
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/t/a/?style=atom' \
+	| sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//"
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/t/a/file/tip/a?style=raw'
+
+cat > collections.conf <<EOF
+[collections]
+$root=$root
+EOF
+
+hg serve -p $HGPORT2 -d --pid-file=hg.pid --webdir-conf collections.conf \
+    -A access-collections.log -E error-collections.log
+cat hg.pid >> $DAEMON_PIDS
+
+echo % should succeed
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/?style=raw'
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/a/file/tip/a?style=raw'
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/b/file/tip/b?style=raw'
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/c/file/tip/c?style=raw'
+
+echo % paths errors 1
+cat error-paths-1.log
+echo % paths errors 2
+cat error-paths-2.log
+echo % collections errors
+cat error-collections.log
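
The paths.conf and collections.conf files written above are the two ways of publishing repositories through --webdir-conf. A small sketch of the resulting virtual-path to repository mapping, read here with the stdlib ConfigParser rather than Mercurial's own config code, and deliberately simplified (the real collections support searches recursively):

import os
import ConfigParser

def webdir_map(conffile):
    # [paths] maps one virtual path to one repository; [collections]
    # publishes every repository found under a root (only a one-level
    # scan in this sketch)
    cp = ConfigParser.RawConfigParser()
    cp.read([conffile])
    repos = {}
    if cp.has_section('paths'):
        for virtual, real in cp.items('paths'):
            repos[virtual.strip('/')] = real
    if cp.has_section('collections'):
        for _dummy, root in cp.items('collections'):
            for name in os.listdir(root):
                if os.path.isdir(os.path.join(root, name, '.hg')):
                    repos[name] = os.path.join(root, name)
    return repos

print webdir_map('paths.conf')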
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-hgwebdir.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,124 @@
+adding a
+adding b
+adding c
+% should give a 404 - file does not exist
+404 Not Found
+
+
+error: Path not found: bork/
+% should succeed
+200 Script output follows
+
+
+/a/
+/b/
+
+200 Script output follows
+
+a
+200 Script output follows
+
+b
+% should give a 404 - repo is not published
+404 Not Found
+
+
+error: repository c not found
+% should succeed, slashy names
+200 Script output follows
+
+
+/b/
+/t/a/
+
+200 Script output follows
+
+
+/t/a/
+
+200 Script output follows
+
+
+/t/a/
+
+200 Script output follows
+
+<?xml version="1.0" encoding="ascii"?>
+<feed xmlns="http://127.0.0.1/2005/Atom">
+ <!-- Changelog -->
+ <id>http://127.0.0.1/t/a/</id>
+ <link rel="self" href="http://127.0.0.1/t/a/atom-log"/>
+ <link rel="alternate" href="http://127.0.0.1/t/a/"/>
+ <title>t/a Changelog</title>
+ <updated>1970-01-01T00:00:01+00:00</updated>
+
+ <entry>
+  <title>a</title>
+  <id>http://127.0.0.1/mercurial/#changeset-8580ff50825a50c8f716709acdf8de0deddcd6ab</id>
+  <link href="http://127.0.0.1/t/a/rev/8580ff50825a50c8f716709acdf8de0deddcd6ab"/>
+  <author>
+   <name>test</name>
+   <email>&#116;&#101;&#115;&#116;</email>
+  </author>
+  <updated>1970-01-01T00:00:01+00:00</updated>
+  <published>1970-01-01T00:00:01+00:00</published>
+  <content type="xhtml">
+   <div xmlns="http://127.0.0.1/1999/xhtml">
+    <pre xml:space="preserve">a</pre>
+   </div>
+  </content>
+ </entry>
+
+</feed>
+200 Script output follows
+
+<?xml version="1.0" encoding="ascii"?>
+<feed xmlns="http://127.0.0.1/2005/Atom">
+ <!-- Changelog -->
+ <id>http://127.0.0.1/t/a/</id>
+ <link rel="self" href="http://127.0.0.1/t/a/atom-log"/>
+ <link rel="alternate" href="http://127.0.0.1/t/a/"/>
+ <title>t/a Changelog</title>
+ <updated>1970-01-01T00:00:01+00:00</updated>
+
+ <entry>
+  <title>a</title>
+  <id>http://127.0.0.1/mercurial/#changeset-8580ff50825a50c8f716709acdf8de0deddcd6ab</id>
+  <link href="http://127.0.0.1/t/a/rev/8580ff50825a50c8f716709acdf8de0deddcd6ab"/>
+  <author>
+   <name>test</name>
+   <email>&#116;&#101;&#115;&#116;</email>
+  </author>
+  <updated>1970-01-01T00:00:01+00:00</updated>
+  <published>1970-01-01T00:00:01+00:00</published>
+  <content type="xhtml">
+   <div xmlns="http://127.0.0.1/1999/xhtml">
+    <pre xml:space="preserve">a</pre>
+   </div>
+  </content>
+ </entry>
+
+</feed>
+200 Script output follows
+
+a
+% should succeed
+200 Script output follows
+
+
+/a/
+/b/
+/c/
+
+200 Script output follows
+
+a
+200 Script output follows
+
+b
+200 Script output follows
+
+c
+% paths errors 1
+% paths errors 2
+% collections errors
--- a/tests/test-hook.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-hook.out	Wed Feb 06 19:57:52 2008 -0800
@@ -40,7 +40,6 @@
 added 3 changesets with 2 changes to 2 files
 (run 'hg update' to get a working copy)
 pretag hook: HG_LOCAL=0 HG_NODE=4c52fb2e402287dd5dc052090682536c8406c321 HG_TAG=a 
-tag hook: HG_LOCAL=0 HG_NODE=4c52fb2e402287dd5dc052090682536c8406c321 HG_TAG=a 
 precommit hook: HG_PARENT1=4c52fb2e402287dd5dc052090682536c8406c321 
 pretxncommit hook: HG_NODE=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 HG_PARENT1=4c52fb2e402287dd5dc052090682536c8406c321 
 4:8ea2ef7ad3e8
--- a/tests/test-http	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-http	Wed Feb 06 19:57:52 2008 -0800
@@ -6,23 +6,23 @@
 cd test
 echo foo>foo
 hg commit -A -d '0 0' -m 1
-hg --config server.uncompressed=True serve -p 20059 -d --pid-file=../hg1.pid
-hg serve -p 20060 -d --pid-file=../hg2.pid
+hg --config server.uncompressed=True serve -p $HGPORT -d --pid-file=../hg1.pid
+hg serve -p $HGPORT1 -d --pid-file=../hg2.pid
 # Test server address cannot be reused
-hg serve -p 20060 2>&1 | sed -e 's/abort: cannot start server:.*/abort: cannot start server:/'
+hg serve -p $HGPORT1 2>&1 | sed -e 's/abort: cannot start server:.*/abort: cannot start server:/'
 cd ..
 cat hg1.pid hg2.pid >> $DAEMON_PIDS
 
 echo % clone via stream
-http_proxy= hg clone --uncompressed http://localhost:20059/ copy 2>&1 | \
+http_proxy= hg clone --uncompressed http://localhost:$HGPORT/ copy 2>&1 | \
   sed -e 's/[0-9][0-9.]*/XXX/g' -e 's/[KM]\(B\/sec\)/X\1/'
 hg verify -R copy
 
 echo % try to clone via stream, should use pull instead
-http_proxy= hg clone --uncompressed http://localhost:20060/ copy2
+http_proxy= hg clone --uncompressed http://localhost:$HGPORT1/ copy2
 
 echo % clone via pull
-http_proxy= hg clone http://localhost:20059/ copy-pull
+http_proxy= hg clone http://localhost:$HGPORT1/ copy-pull
 hg verify -R copy-pull
 
 cd test
@@ -34,5 +34,5 @@
 cd copy-pull
 echo '[hooks]' >> .hg/hgrc
 echo 'changegroup = python ../printenv.py changegroup' >> .hg/hgrc
-hg pull
+hg pull | sed -e 's,:[0-9][0-9]*/,/,'
 cd ..
--- a/tests/test-http-clone-r	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-http-clone-r	Wed Feb 06 19:57:52 2008 -0800
@@ -49,13 +49,13 @@
 hg debugindex .hg/store/00manifest.i
 hg verify
 echo "# Starting server"
-hg serve -p 20061 -d --pid-file=../hg1.pid
+hg serve -p $HGPORT -d --pid-file=../hg1.pid
 cd ..
 cat hg1.pid >> $DAEMON_PIDS
 
 echo "# clone remote via stream"
 for i in 0 1 2 3 4 5 6 7 8; do
-   hg clone -r "$i" http://localhost:20061/ test-"$i" 2>&1
+   hg clone -r "$i" http://localhost:$HGPORT/ test-"$i" 2>&1
    if cd test-"$i"; then
       hg verify
       cd ..
@@ -66,13 +66,13 @@
 hg verify
 cd ..
 cd test-1
-hg pull -r 4 http://localhost:20061/ 2>&1
+hg pull -r 4 http://localhost:$HGPORT/ 2>&1 | sed -e 's,:[0-9][0-9]*/,/,'
 hg verify
-hg pull http://localhost:20061/ 2>&1
+hg pull http://localhost:$HGPORT/ 2>&1 | sed -e 's,:[0-9][0-9]*/,/,'
 cd ..
 cd test-2
-hg pull -r 5 http://localhost:20061/ 2>&1
+hg pull -r 5 http://localhost:$HGPORT/ 2>&1 | sed -e 's,:[0-9][0-9]*/,/,'
 hg verify
-hg pull http://localhost:20061/ 2>&1
+hg pull http://localhost:$HGPORT/ 2>&1 | sed -e 's,:[0-9][0-9]*/,/,'
 hg verify
 cd ..
--- a/tests/test-http-clone-r.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-http-clone-r.out	Wed Feb 06 19:57:52 2008 -0800
@@ -138,7 +138,7 @@
 crosschecking files in changesets and manifests
 checking files
 4 files, 9 changesets, 7 total revisions
-pulling from http://localhost:20061/
+pulling from http://localhost/
 searching for changes
 adding changesets
 adding manifests
@@ -150,14 +150,14 @@
 crosschecking files in changesets and manifests
 checking files
 1 files, 3 changesets, 2 total revisions
-pulling from http://localhost:20061/
+pulling from http://localhost/
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 6 changesets with 5 changes to 4 files
 (run 'hg update' to get a working copy)
-pulling from http://localhost:20061/
+pulling from http://localhost/
 searching for changes
 adding changesets
 adding manifests
@@ -169,7 +169,7 @@
 crosschecking files in changesets and manifests
 checking files
 1 files, 5 changesets, 3 total revisions
-pulling from http://localhost:20061/
+pulling from http://localhost/
 searching for changes
 adding changesets
 adding manifests
--- a/tests/test-http-proxy	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-http-proxy	Wed Feb 06 19:57:52 2008 -0800
@@ -4,38 +4,38 @@
 cd a
 echo a > a
 hg ci -Ama -d '1123456789 0'
-hg --config server.uncompressed=True serve -p 20059 -d --pid-file=hg.pid
+hg --config server.uncompressed=True serve -p $HGPORT -d --pid-file=hg.pid
 cat hg.pid >> $DAEMON_PIDS
 
 cd ..
-("$TESTDIR/tinyproxy.py" 20060 localhost >proxy.log 2>&1 </dev/null &
+("$TESTDIR/tinyproxy.py" $HGPORT1 localhost >proxy.log 2>&1 </dev/null &
 echo $! > proxy.pid)
 cat proxy.pid >> $DAEMON_PIDS
 sleep 2
 
 echo %% url for proxy, stream
-http_proxy=http://localhost:20060/ hg --config http_proxy.always=True clone --uncompressed http://localhost:20059/ b | \
+http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone --uncompressed http://localhost:$HGPORT/ b | \
   sed -e 's/[0-9][0-9.]*/XXX/g' -e 's/[KM]\(B\/sec\)/X\1/'
 cd b
 hg verify
 cd ..
 
 echo %% url for proxy, pull
-http_proxy=http://localhost:20060/ hg --config http_proxy.always=True clone http://localhost:20059/ b-pull
+http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone http://localhost:$HGPORT/ b-pull
 cd b-pull
 hg verify
 cd ..
 
 echo %% host:port for proxy
-http_proxy=localhost:20060 hg clone --config http_proxy.always=True http://localhost:20059/ c
+http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ c
 
 echo %% proxy url with user name and password
-http_proxy=http://user:passwd@localhost:20060 hg clone --config http_proxy.always=True http://localhost:20059/ d
+http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ d
 
 echo %% url with user name and password
-http_proxy=http://user:passwd@localhost:20060 hg clone --config http_proxy.always=True http://user:passwd@localhost:20059/ e
+http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://user:passwd@localhost:$HGPORT/ e
 
 echo %% bad host:port for proxy
-http_proxy=localhost:20061 hg clone --config http_proxy.always=True http://localhost:20059/ f
+http_proxy=localhost:$HGPORT2 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ f
 
 exit 0
--- a/tests/test-http.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-http.out	Wed Feb 06 19:57:52 2008 -0800
@@ -31,8 +31,8 @@
 1 files, 1 changesets, 1 total revisions
 adding bar
 % pull
-changegroup hook: HG_NODE=cfbd11a1fa315300a080c3de8fe36b0fc5820acf HG_SOURCE=pull HG_URL=http://localhost:20059/ 
-pulling from http://localhost:20059/
+changegroup hook: HG_NODE=cfbd11a1fa315300a080c3de8fe36b0fc5820acf HG_SOURCE=pull HG_URL=http://localhost/ 
+pulling from http://localhost/
 searching for changes
 adding changesets
 adding manifests
--- a/tests/test-hup.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-hup.out	Wed Feb 06 19:57:52 2008 -0800
@@ -4,4 +4,4 @@
 transaction abort!
 rollback completed
 killed!
-.hg/00changelog.i .hg/journal.dirstate .hg/requires .hg/store .hg/store/00changelog.i .hg/store/00changelog.i.a
+.hg/00changelog.i .hg/journal.branch .hg/journal.dirstate .hg/requires .hg/store .hg/store/00changelog.i .hg/store/00changelog.i.a
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-imerge	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,64 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "imerge=" >> $HGRCPATH
+HGMERGE=true
+export HGMERGE
+
+hg init base
+cd base
+
+echo foo > foo
+echo bar > bar
+hg ci -Am0 -d '0 0'
+
+hg mv foo foo2
+echo foo >> foo2
+hg ci -m1 -d '1 0'
+
+hg up -C 0
+echo bar >> foo
+echo bar >> bar
+hg ci -m2 -d '2 0'
+
+echo % start imerge
+hg imerge
+
+cat foo2
+cat bar
+
+echo % status -v
+hg -v imerge st
+
+echo % next
+hg imerge next
+
+echo % merge next
+hg --traceback imerge
+
+echo % unresolve
+hg imerge unres foo
+
+echo % merge foo
+hg imerge merge foo
+
+echo % save
+echo foo > foo2
+hg imerge save ../savedmerge
+
+echo % load
+hg up -C 0
+hg imerge --traceback load ../savedmerge
+cat foo2
+
+hg ci -m'merged' -d '3 0'
+hg tip -v
+
+echo % nothing to merge -- tip
+hg imerge
+
+hg up 0
+echo % nothing to merge
+hg imerge
+
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-imerge.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,48 @@
+adding bar
+adding foo
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+% start imerge
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+U foo
+foo
+bar
+bar
+bar
+% status -v
+merging e6da46716401 and 30d266f502e7
+U foo (foo2)
+% next
+foo
+% merge next
+merging foo and foo2
+all conflicts resolved
+% unresolve
+% merge foo
+merging foo and foo2
+all conflicts resolved
+% save
+% load
+2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+R foo
+all conflicts resolved
+foo
+changeset:   3:fa9a6defdcaf
+tag:         tip
+parent:      2:e6da46716401
+parent:      1:30d266f502e7
+user:        test
+date:        Thu Jan 01 00:00:03 1970 +0000
+files:       foo foo2
+description:
+merged
+
+
+% nothing to merge -- tip
+abort: there is nothing to merge
+2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+% nothing to merge
+abort: there is nothing to merge - use "hg update" instead
--- a/tests/test-import	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-import	Wed Feb 06 19:57:52 2008 -0800
@@ -32,6 +32,20 @@
 hg --cwd b import -mpatch ../tip.patch
 rm -r b
 
+echo % import of plain diff with specific date and user
+hg clone -r0 a b
+hg --cwd a diff -r0:1 > tip.patch
+hg --cwd b import -mpatch -d '1 0' -u 'user@nowhere.net' ../tip.patch
+hg -R b tip -pv
+rm -r b
+
+echo % import of plain diff should be ok with --no-commit
+hg clone -r0 a b
+hg --cwd a diff -r0:1 > tip.patch
+hg --cwd b import --no-commit ../tip.patch
+hg --cwd b diff --nodates
+rm -r b
+
 echo % hg -R repo import
 # put the clone in a subdir - having a directory named "a"
 # used to hide a bug.
@@ -57,7 +71,7 @@
 cat > mkmsg.py <<EOF
 import email.Message, sys
 msg = email.Message.Message()
-msg.set_payload('email commit message\n' + open('tip.patch').read())
+msg.set_payload('email commit message\n' + open('tip.patch', 'rb').read())
 msg['Subject'] = 'email patch'
 msg['From'] = 'email patcher'
 sys.stdout.write(msg.as_string())
@@ -94,10 +108,11 @@
 rm -r b
 
 # subject: duplicate detection, removal of [PATCH]
+# The '---' tests the gitsendmail handling without proper mail headers
 cat > mkmsg2.py <<EOF
 import email.Message, sys
 msg = email.Message.Message()
-msg.set_payload('email patch\n\nnext line\n' + open('tip.patch').read())
+msg.set_payload('email patch\n\nnext line\n---\n' + open('tip.patch').read())
 msg['Subject'] = '[PATCH] email patch'
 msg['From'] = 'email patcher'
 sys.stdout.write(msg.as_string())
@@ -110,6 +125,18 @@
 hg --cwd b tip --template '{desc}\n'
 rm -r b
 
+# We weren't backing up the correct dirstate file when importing many patches
+# (issue963)
+echo '% import patch1 patch2; rollback'
+echo line 3 >> a/a
+hg --cwd a ci -m'third change'
+hg --cwd a export -o '../patch%R' 1 2
+hg clone -qr0 a b
+hg --cwd b parents --template 'parent: #rev#\n'
+hg --cwd b import ../patch1 ../patch2
+hg --cwd b rollback
+hg --cwd b parents --template 'parent: #rev#\n'
+rm -r b
 
 # bug non regression test
 # importing a patch in a subdirectory failed at the commit stage
@@ -128,3 +155,55 @@
 hg --cwd b tip | grep someoneelse
 echo "% should be empty"
 hg --cwd b status
+
+
+# Test fuzziness (ambiguous patch location, fuzz=2)
+echo % test fuzziness
+hg init fuzzy
+cd fuzzy
+echo line1 > a
+echo line0 >> a
+echo line3 >> a
+hg ci -Am adda
+echo line1 > a
+echo line2 >> a
+echo line0 >> a
+echo line3 >> a
+hg ci -m change a
+hg export tip > tip.patch
+hg up -C 0
+echo line1 > a
+echo line0 >> a
+echo line1 >> a
+echo line0 >> a
+hg ci -m brancha
+hg import -v tip.patch
+cd ..
+
+# Test hunk touching empty files (issue906)
+hg init empty
+cd empty
+touch a
+touch b1
+touch c1
+echo d > d
+hg ci -Am init
+echo a > a
+echo b > b1
+hg mv b1 b2
+echo c > c1
+hg copy c1 c2
+rm d
+touch d
+hg diff --git
+hg ci -m empty
+hg export --git tip > empty.diff
+hg up -C 0
+hg import empty.diff
+for name in a b1 b2 c1 c2 d;
+do
+    echo % $name file
+    test -f $name && cat $name
+done
+cd ..
+
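The mkmsg2.py change above inserts a bare '---' line between the commit message text and the patch, the git send-mail style separator mentioned in the comment. A minimal sketch of that convention, splitting a mail body into description and patch at the first lone '---' or diff header; this only illustrates the convention, not Mercurial's patch parser:

def split_message(body):
    # the description ends at the first lone '---' line or at the
    # first diff header; everything from there on belongs to the patch
    desc, patch, in_patch = [], [], False
    for line in body.splitlines(True):
        if not in_patch and (line.rstrip() == '---'
                             or line.startswith('diff -r')
                             or line.startswith('diff --git')):
            in_patch = True
        if in_patch:
            patch.append(line)
        else:
            desc.append(line)
    return ''.join(desc).strip(), ''.join(patch)

body = 'email patch\n\nnext line\n---\ndiff -r 000000000000 a\n--- a/a\n+++ b/a\n'
desc, patch = split_message(body)
print repr(desc)
print patch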
--- a/tests/test-import.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-import.out	Wed Feb 06 19:57:52 2008 -0800
@@ -22,6 +22,7 @@
 applying ../tip.patch
 transaction abort!
 rollback completed
+abort: empty commit message
 % import of plain diff should be ok with message
 requesting all changes
 adding changesets
@@ -30,6 +31,44 @@
 added 1 changesets with 2 changes to 2 files
 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
 applying ../tip.patch
+% import of plain diff with specific date and user
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 2 changes to 2 files
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+applying ../tip.patch
+changeset:   1:ca68f19f3a40
+tag:         tip
+user:        user@nowhere.net
+date:        Thu Jan 01 00:00:01 1970 +0000
+files:       a
+description:
+patch
+
+
+diff -r 80971e65b431 -r ca68f19f3a40 a
+--- a/a	Thu Jan 01 00:00:00 1970 +0000
++++ b/a	Thu Jan 01 00:00:01 1970 +0000
+@@ -1,1 +1,2 @@
+ line 1
++line 2
+
+% import of plain diff should be ok with --no-commit
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 2 changes to 2 files
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+applying ../tip.patch
+diff -r 80971e65b431 a
+--- a/a
++++ b/a
+@@ -1,1 +1,2 @@
+ line 1
++line 2
 % hg -R repo import
 requesting all changes
 adding changesets
@@ -91,6 +130,7 @@
 applying patch from stdin
 transaction abort!
 rollback completed
+abort: empty commit message
 % hg export in email, should use patch header
 requesting all changes
 adding changesets
@@ -111,6 +151,13 @@
 email patch
 
 next line
+---
+% import patch1 patch2; rollback
+parent: 0
+applying ../patch1
+applying ../patch2
+rolling back last transaction
+parent: 1
 % hg import in a subdirectory
 requesting all changes
 adding changesets
@@ -124,3 +171,55 @@
 % committer should be 'someoneelse'
 user:        someoneelse
 % should be empty
+% test fuzziness
+adding a
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+applying tip.patch
+patching file a
+Hunk #1 succeeded at 1 with fuzz 2 (offset -2 lines).
+a
+adding a
+adding b1
+adding c1
+adding d
+diff --git a/a b/a
+--- a/a
++++ b/a
+@@ -0,0 +1,1 @@
++a
+diff --git a/b1 b/b2
+rename from b1
+rename to b2
+--- a/b1
++++ b/b2
+@@ -0,0 +1,1 @@
++b
+diff --git a/c1 b/c1
+--- a/c1
++++ b/c1
+@@ -0,0 +1,1 @@
++c
+diff --git a/c1 b/c2
+copy from c1
+copy to c2
+--- a/c1
++++ b/c2
+@@ -0,0 +1,1 @@
++c
+diff --git a/d b/d
+--- a/d
++++ b/d
+@@ -1,1 +0,0 @@
+-d
+4 files updated, 0 files merged, 2 files removed, 0 files unresolved
+applying empty.diff
+% a file
+a
+% b1 file
+% b2 file
+b
+% c1 file
+c
+% c2 file
+c
+% d file
--- a/tests/test-incoming-outgoing	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-incoming-outgoing	Wed Feb 06 19:57:52 2008 -0800
@@ -8,20 +8,20 @@
 	hg commit -A -m $i -d "1000000 0"
 done
 hg verify
-hg serve -p 20059 -d --pid-file=hg.pid
+hg serve -p $HGPORT -d --pid-file=hg.pid
 cat hg.pid >> $DAEMON_PIDS
 cd ..
 
 hg init new
 # http incoming
-http_proxy= hg -R new incoming http://localhost:20059/
-http_proxy= hg -R new incoming -r 4 http://localhost:20059/
+http_proxy= hg -R new incoming http://localhost:$HGPORT/ | sed -e 's,:[0-9][0-9]*/,/,'
+http_proxy= hg -R new incoming -r 4 http://localhost:$HGPORT/ | sed -e 's,:[0-9][0-9]*/,/,'
 # local incoming
 hg -R new incoming test
 hg -R new incoming -r 4 test
 
 # test with --bundle
-http_proxy= hg -R new incoming --bundle test.hg http://localhost:20059/
+http_proxy= hg -R new incoming --bundle test.hg http://localhost:$HGPORT/ | sed -e 's,:[0-9][0-9]*/,/,'
 hg -R new incoming --bundle test2.hg test
 
 # test the resulting bundles
@@ -44,5 +44,5 @@
 hg verify
 cd ..
 hg -R test-dev outgoing test
-http_proxy= hg -R test-dev outgoing http://localhost:20059/
-http_proxy= hg -R test-dev outgoing -r 11 http://localhost:20059/
+http_proxy= hg -R test-dev outgoing http://localhost:$HGPORT/ | sed -e 's,:[0-9][0-9]*/,/,'
+http_proxy= hg -R test-dev outgoing -r 11 http://localhost:$HGPORT/ | sed -e 's,:[0-9][0-9]*/,/,'
--- a/tests/test-incoming-outgoing.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-incoming-outgoing.out	Wed Feb 06 19:57:52 2008 -0800
@@ -4,7 +4,7 @@
 crosschecking files in changesets and manifests
 checking files
 1 files, 9 changesets, 9 total revisions
-comparing with http://localhost:20059/
+comparing with http://localhost/
 changeset:   0:9cb21d99fe27
 user:        test
 date:        Mon Jan 12 13:46:40 1970 +0000
@@ -51,7 +51,7 @@
 date:        Mon Jan 12 13:46:40 1970 +0000
 summary:     8
 
-comparing with http://localhost:20059/
+comparing with http://localhost/
 changeset:   0:9cb21d99fe27
 user:        test
 date:        Mon Jan 12 13:46:40 1970 +0000
@@ -151,7 +151,7 @@
 date:        Mon Jan 12 13:46:40 1970 +0000
 summary:     4
 
-comparing with http://localhost:20059/
+comparing with http://localhost/
 changeset:   0:9cb21d99fe27
 user:        test
 date:        Mon Jan 12 13:46:40 1970 +0000
@@ -301,7 +301,7 @@
 date:        Mon Jan 12 13:46:40 1970 +0000
 summary:     13
 
-comparing with http://localhost:20059/
+comparing with http://localhost/
 searching for changes
 changeset:   9:3741c3ad1096
 user:        test
@@ -329,7 +329,7 @@
 date:        Mon Jan 12 13:46:40 1970 +0000
 summary:     13
 
-comparing with http://localhost:20059/
+comparing with http://localhost/
 searching for changes
 changeset:   9:3741c3ad1096
 user:        test
--- a/tests/test-install.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-install.out	Wed Feb 06 19:57:52 2008 -0800
@@ -2,7 +2,6 @@
 Checking extensions...
 Checking templates...
 Checking patch...
-Checking merge helper...
 Checking commit editor...
 Checking username...
 No problems detected
--- a/tests/test-issue322.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-issue322.out	Wed Feb 06 19:57:52 2008 -0800
@@ -1,12 +1,12 @@
 % file replaced with directory
 adding a
 % should fail - would corrupt dirstate
-abort: file named 'a' already in dirstate
+abort: file 'a' in dirstate clashes with 'a/a'
 % directory replaced with file
 adding a/a
 % should fail - would corrupt dirstate
-abort: directory named 'a' already in dirstate
+abort: directory 'a' already in dirstate
 % directory replaced with file
 adding b/c/d
 % should fail - would corrupt dirstate
-abort: directory named 'b' already in dirstate
+abort: directory 'b' already in dirstate
--- a/tests/test-issue352	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-issue352	Wed Feb 06 19:57:52 2008 -0800
@@ -1,6 +1,8 @@
 #!/bin/sh
 # http://www.selenic.com/mercurial/bts/issue352
 
+"$TESTDIR/hghave" eol-in-paths || exit 80
+
 hg init foo
 cd foo
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-issue522	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,31 @@
+#!/bin/sh
+
+# In the merge below, the file "foo" has the same contents in both
+# parents, but if we look at the file-level history, we'll notice that
+# the version in p1 is an ancestor of the version in p2.  This test
+# makes sure that we'll use the version from p2 in the manifest of the
+# merge revision.
+
+hg init repo
+cd repo
+
+echo foo > foo
+hg ci -d '0 0' -qAm 'add foo'
+
+echo bar >> foo
+hg ci -d '0 0' -m 'change foo'
+
+hg backout -d '0 0' -r tip -m 'backout changed foo'
+
+hg up -C 0
+touch bar
+hg ci -d '0 0' -qAm 'add bar'
+
+hg merge --debug
+hg debugstate | grep foo
+hg st -A foo
+hg ci -d '0 0' -m 'merge'
+
+hg manifest --debug | grep foo
+hg debugindex .hg/store/data/foo.i
+
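The comment at the top of test-issue522 states the rule being exercised: when the file is identical in both parents but one file revision is an ancestor of the other, the merge must record the descendant (p2's version here). A toy sketch of that ancestry decision over a {node: parent} map; it illustrates the rule only and is not Mercurial's manifest-merge code:

def ancestors(parents, node):
    # walk a {node: parent} mapping back to the root
    seen = set()
    while node is not None:
        seen.add(node)
        node = parents.get(node)
    return seen

def pick_merge_version(parents, p1, p2):
    # if one version descends from the other, keep the descendant
    if p1 in ancestors(parents, p2):
        return p2
    if p2 in ancestors(parents, p1):
        return p1
    return None  # real divergence: an actual merge is needed

# foo's file history in the test: rev0 -> rev1 -> rev2 (the backout)
filelog = {'rev0': None, 'rev1': 'rev0', 'rev2': 'rev1'}
print pick_merge_version(filelog, 'rev0', 'rev2')   # prints rev2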
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-issue522.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,20 @@
+reverting foo
+changeset 2:4d9e78aaceee backs out changeset 1:b515023e500e
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+resolving manifests
+ overwrite None partial False
+ ancestor bbd179dfa0a7 local 71766447bdbb+ remote 4d9e78aaceee
+  searching for copies back to rev 1
+  unmatched files in local:
+   bar
+ foo: remote is newer -> g
+getting foo
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+n   0         -2 unset               foo
+M foo
+c6fc755d7e68f49f880599da29f15add41f42f5a 644   foo
+   rev    offset  length   base linkrev nodeid       p1           p2
+     0         0       5      0       0 2ed2a3912a0b 000000000000 000000000000
+     1         5       9      1       1 6f4310b00b9a 2ed2a3912a0b 000000000000
+     2        14       5      2       2 c6fc755d7e68 6f4310b00b9a 000000000000
--- a/tests/test-issue612.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-issue612.out	Wed Feb 06 19:57:52 2008 -0800
@@ -1,6 +1,5 @@
 adding src/a.c
-copying src/a.c to source/a.c
-removing src/a.c
+moving src/a.c to source/a.c
 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
 ? src/a.o
 merging src/a.c and source/a.c
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-issue660	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,94 @@
+#!/bin/sh
+# http://www.selenic.com/mercurial/bts/issue660
+
+
+hg init a
+cd a
+echo a > a
+mkdir b
+echo b > b/b
+hg commit -A -m "a is file, b is dir"
+
+echo % file replaced with directory
+
+rm a
+mkdir a
+echo a > a/a
+
+echo % should fail - would corrupt dirstate
+hg add a/a 
+
+echo % removing shadow
+hg rm --after a
+
+echo % should succeed - shadow removed
+hg add a/a
+
+echo % directory replaced with file
+
+rm -r b
+echo b > b
+
+echo % should fail - would corrupt dirstate
+hg add b
+
+echo % removing shadow
+hg rm --after b/b
+
+echo % should succeed - shadow removed
+hg add b
+
+echo % look what we got
+hg st
+
+echo % revert reintroducing shadow - should fail
+rm -r a b
+hg revert b/b
+
+echo % revert all - should succeed
+hg revert --all
+hg st
+
+echo % addremove
+
+rm -r a b
+mkdir a
+echo a > a/a
+echo b > b
+
+hg addremove
+hg st
+
+echo % commit
+hg ci -A -m "a is dir, b is file"
+hg st --all
+
+echo % long directory replaced with file
+
+mkdir d
+mkdir d/d
+echo d > d/d/d
+hg commit -A -m "d is long directory"
+rm -r d
+echo d > d
+
+echo % should fail - would corrupt dirstate
+hg add d
+
+echo % removing shadow
+hg rm --after d/d/d
+
+echo % should succeed - shadow removed
+hg add d
+hg ci -md
+
+echo % update should work at least with clean workdir
+
+rm -r a b d
+hg up -r 0
+hg st --all
+rm -r a b
+hg up -r 1
+hg st --all
+
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-issue660.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,49 @@
+adding a
+adding b/b
+% file replaced with directory
+% should fail - would corrupt dirstate
+abort: file 'a' in dirstate clashes with 'a/a'
+% removing shadow
+% should succeed - shadow removed
+% directory replaced with file
+% should fail - would corrupt dirstate
+abort: directory 'b' already in dirstate
+% removing shadow
+% should succeed - shadow removed
+% look what we got
+A a/a
+A b
+R a
+R b/b
+% revert reintroducing shadow - should fail
+abort: file 'b' in dirstate clashes with 'b/b'
+% revert all - should succeed
+undeleting a
+forgetting a/a
+forgetting b
+undeleting b/b
+% addremove
+removing a
+adding a/a
+adding b
+removing b/b
+A a/a
+A b
+R a
+R b/b
+% commit
+C a/a
+C b
+% long directory replaced with file
+adding d/d/d
+% should fail - would corrupt dirstate
+abort: directory 'd' already in dirstate
+% removing shadow
+% should succeed - shadow removed
+% update should work at least with clean workdir
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+C a
+C b/b
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+C a/a
+C b
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-issue672	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,35 @@
+#!/bin/sh
+
+# 0-2-4
+#  \ \ \
+#   1-3-5
+#
+# rename in #1, content change in #4.
+
+hg init t
+cd t
+
+touch 1
+touch 2
+hg commit -Am init -d "0 0"  # 0
+
+hg rename 1 1a
+hg commit -m rename -d "0 0" # 1
+
+hg co -C 0
+echo unrelated >> 2
+hg ci -m unrelated1 -d "0 0"  # 2
+
+hg merge --debug 1
+hg ci -m merge1 -d "0 0" # 3
+
+hg co -C 2
+echo hello >> 1
+hg ci -m unrelated2 -d "0 0" # 4
+
+hg co -C 3
+hg merge -y --debug 4
+
+hg co -C 4
+hg merge -y --debug 3
+
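test-issue672 above combines a rename of file 1 to 1a on one branch with a content change to 1 on the other, so the merge has to carry the new content over to the new name. A toy sketch of that bookkeeping with a copies map discovered against the ancestor; it illustrates the scenario only, not hg's copy tracing:

def merge_with_rename(local, remote, copies):
    # copies maps new name -> old name as found against the common
    # ancestor; content changed under the old name on the other side
    # ends up under the new name in the merge result
    merged = dict(local)
    for new, old in copies.items():
        if old in remote:
            merged[new] = remote[old]
            merged.pop(old, None)
    return merged

# the renaming side has 1a, the other side changed 1 in place
local = {'1a': '', '2': 'unrelated\n'}
remote = {'1': 'hello\n', '2': 'unrelated\n'}
print sorted(merge_with_rename(local, remote, {'1a': '1'}).items())
# [('1a', 'hello\n'), ('2', 'unrelated\n')]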
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-issue672.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,55 @@
+adding 1
+adding 2
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+resolving manifests
+ overwrite None partial False
+ ancestor 81f4b099af3d local c64f439569a9+ remote 2f8037f47a5c
+  searching for copies back to rev 1
+  unmatched files in other:
+   1a
+  all copies found (* = to merge, ! = divergent):
+   1a -> 1 
+  checking for directory renames
+ 1: other deleted -> r
+ 1a: remote created -> g
+removing 1
+getting 1a
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+resolving manifests
+ overwrite None partial False
+ ancestor c64f439569a9 local ac7575e3c052+ remote 746e9549ea96
+  searching for copies back to rev 1
+  unmatched files in local:
+   1a
+  all copies found (* = to merge, ! = divergent):
+   1a -> 1 *
+  checking for directory renames
+ 1a: local moved to 1 -> m
+picked tool 'internal:merge' for 1a (binary False symlink False)
+merging 1a and 1
+my 1a@ac7575e3c052+ other 1@746e9549ea96 ancestor 1@81f4b099af3d
+ premerge successful
+0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+resolving manifests
+ overwrite None partial False
+ ancestor c64f439569a9 local 746e9549ea96+ remote ac7575e3c052
+  searching for copies back to rev 1
+  unmatched files in other:
+   1a
+  all copies found (* = to merge, ! = divergent):
+   1a -> 1 *
+  checking for directory renames
+ 1: remote moved to 1a -> m
+copying 1 to 1a
+picked tool 'internal:merge' for 1 (binary False symlink False)
+merging 1 and 1a
+my 1@746e9549ea96+ other 1a@ac7575e3c052 ancestor 1@81f4b099af3d
+ premerge successful
+removing 1
+0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-journal-exists	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+hg init
+echo a > a
+hg ci -Am0
+
+touch .hg/store/journal
+
+echo foo > a
+hg ci -Am0
+
+hg recover
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-journal-exists.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,8 @@
+adding a
+abort: journal already exists - run hg recover!
+rolling back interrupted transaction
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 1 changesets, 1 total revisions
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-keyword	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,253 @@
+#!/bin/sh
+
+cat <<EOF >> $HGRCPATH
+[extensions]
+hgext.keyword =
+hgext.mq =
+[keyword]
+* =
+b = ignore
+[hooks]
+commit=
+commit.test=cp a hooktest
+EOF
+
+echo % help
+hg help keyword
+
+echo % hg kwdemo
+hg --quiet kwdemo --default \
+| sed -e 's![^ ][^ ]*demo.txt,v!/TMP/demo.txt,v!' \
+ -e 's/,v [a-z0-9][a-z0-9]* /,v xxxxxxxxxxxx /' \
+ -e '/[$]Revision/ s/: [a-z0-9][a-z0-9]* /: xxxxxxxxxxxx /' \
+ -e 's! 20[0-9][0-9]/[01][0-9]/[0-3][0-9] [0-2][0-9]:[0-6][0-9]:[0-6][0-9]! 2000/00/00 00:00:00!'
+
+hg --quiet kwdemo "Branch = {branches}"
+
+hg init Test
+cd Test
+
+echo % kwshrink should exit silently in empty/invalid repo
+hg kwshrink
+
+echo 'expand $Id$' > a
+echo 'do not process $Id:' >> a
+echo 'xxx $' >> a
+echo 'ignore $Id$' > b
+ln -s a sym
+echo % cat
+cat sym a b
+
+echo % addremove
+hg addremove
+echo % status
+hg status
+
+echo % default keyword expansion including commit hook
+echo % interrupted commit should not change state or run commit hook
+hg --debug commit
+echo % status
+hg status
+
+echo % commit
+hg --debug commit -mabsym -d '0 0' -u 'User Name <user@example.com>'
+echo % status
+hg status
+echo % identify
+hg --quiet identify
+echo % cat
+cat sym a b
+echo % hg cat
+hg cat sym a b
+
+echo
+echo % diff a hooktest
+diff a hooktest
+
+echo % removing commit hook from config
+sed -e '/\[hooks\]/,$ d' $HGRCPATH > $HGRCPATH.nohook
+mv $HGRCPATH.nohook $HGRCPATH
+rm hooktest
+
+echo % touch
+touch a b
+echo % status
+hg status
+
+rm sym a b
+echo % update
+hg update
+echo % cat
+cat sym a b
+
+echo % check whether expansion is filewise
+echo '$Id$' > c
+echo 'tests for different changenodes' >> c
+echo % commit c
+hg commit -A -mcndiff -d '1 0' -u 'User Name <user@example.com>'
+echo % force expansion
+hg -v kwexpand
+echo % compare changenodes in a c
+cat a c
+
+echo % qimport
+hg qimport -r tip -n mqtest.diff
+echo % keywords should not be expanded in patch
+cat .hg/patches/mqtest.diff
+echo % qpop
+hg qpop
+echo % qgoto - should imply qpush
+hg qgoto mqtest.diff
+echo % cat
+cat c
+echo % qpop and move on
+hg qpop
+
+echo % copy
+hg cp a c
+
+echo % kwfiles added
+hg kwfiles
+
+echo % commit
+hg --debug commit -ma2c -d '1 0' -u 'User Name <user@example.com>'
+echo % cat a c
+cat a c
+echo % touch copied c after 1 second
+sleep 1
+touch c
+echo % status
+hg status
+
+echo % kwfiles
+hg kwfiles
+
+echo % diff --rev
+hg diff --rev 0 | grep -v 'b/c'
+
+echo % rollback
+hg rollback
+echo % status
+hg status
+echo % update -C
+hg update --clean
+
+echo % custom keyword expansion
+echo % try with kwdemo
+hg --quiet kwdemo "Xinfo = {author}: {desc}"
+
+cat <<EOF >>$HGRCPATH
+[keywordmaps]
+Id = {file} {node|short} {date|rfc822date} {author|user}
+Xinfo = {author}: {desc}
+EOF
+
+echo % cat
+cat sym a b
+echo % hg cat
+hg cat sym a b
+
+echo
+echo '$Xinfo$' >> a
+cat <<EOF >> log
+firstline
+secondline
+EOF
+
+echo % interrupted commit should not change state
+hg commit
+echo % status
+hg status
+
+echo % commit
+hg --debug commit -l log -d '2 0' -u 'User Name <user@example.com>'
+rm log
+echo % status
+hg status
+
+echo % cat
+cat sym a b
+echo % hg cat
+hg cat sym a b
+echo
+
+echo % remove
+hg remove a
+hg --debug commit -m rma
+echo % status
+hg status
+echo % rollback
+hg rollback
+echo % status
+hg status
+echo % revert a
+hg revert --no-backup --rev tip a
+echo % cat a
+cat a
+
+echo % clone to test incoming
+cd ..
+hg clone -r0 Test Test-a
+cd Test-a
+cat <<EOF >> .hg/hgrc
+[paths]
+default = ../Test
+EOF
+echo % incoming
+# remove path to temp dir
+hg incoming | sed -e 's/^\(comparing with \).*\(test-keyword.*\)/\1\2/'
+
+sed -e 's/Id.*/& rejecttest/' a > a.new
+mv a.new a
+echo % commit rejecttest
+hg --debug commit -m'rejects?' -d '3 0' -u 'User Name <user@example.com>'
+echo % export
+hg export -o ../rejecttest.diff tip
+
+cd ../Test
+echo % import
+hg import ../rejecttest.diff
+echo % cat
+cat sym a b
+echo
+echo % rollback
+hg rollback
+echo % clean update
+hg update --clean
+
+echo % kwexpand/kwshrink on selected files
+mkdir x
+echo % copy a x/a
+hg copy a x/a
+echo % kwexpand a
+hg --verbose kwexpand a
+echo % kwexpand x/a should abort
+hg --verbose kwexpand x/a
+cd x
+hg --debug commit -m xa -d '3 0' -u 'User Name <user@example.com>'
+echo % cat a
+cat a
+echo % kwshrink a inside directory x
+hg --verbose kwshrink a
+echo % cat a
+cat a
+cd ..
+
+echo % kwexpand nonexistent
+hg kwexpand nonexistent
+
+echo % switch off expansion
+echo % kwshrink with unknown file u
+cp a u
+hg --verbose kwshrink
+echo % cat
+cat sym a b
+echo % hg cat
+hg cat sym a b
+echo
+rm $HGRCPATH
+echo % cat
+cat sym a b
+echo % hg cat
+hg cat sym a b
+echo
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-keyword.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,385 @@
+% help
+keyword extension - keyword expansion in local repositories
+
+This extension expands RCS/CVS-like or self-customized $Keywords$
+in tracked text files selected by your configuration.
+
+Keywords are only expanded in local repositories and not stored in
+the change history. The mechanism can be regarded as a convenience
+for the current user or for archive distribution.
+
+Configuration is done in the [keyword] and [keywordmaps] sections
+of hgrc files.
+
+Example:
+
+    [keyword]
+    # expand keywords in every python file except those matching "x*"
+    **.py =
+    x*    = ignore
+
+Note: the more specific you are in your filename patterns
+      the less you lose speed in huge repos.
+
+For [keywordmaps] template mapping and expansion demonstration and
+control run "hg kwdemo".
+
+An additional date template filter {date|utcdate} is provided.
+
+The default template mappings (view with "hg kwdemo -d") can be replaced
+with customized keywords and templates.
+Again, run "hg kwdemo" to control the results of your config changes.
+
+Before changing/disabling active keywords, run "hg kwshrink" to avoid
+the risk of inadvertedly storing expanded keywords in the change history.
+
+To force expansion after enabling it, or a configuration change, run
+"hg kwexpand".
+
+Also, when committing with the record extension or using mq's qrecord, be aware
+that keywords cannot be updated. Again, run "hg kwexpand" on the files in
+question to update keyword expansions after all changes have been checked in.
+
+Expansions spanning more than one line and incremental expansions,
+like CVS' $Log$, are not supported. A keyword template map
+"Log = {desc}" expands to the first line of the changeset description.
+
+list of commands:
+
+ kwdemo     print [keywordmaps] configuration and an expansion example
+ kwexpand   expand keywords in working directory
+ kwfiles    print files currently configured for keyword expansion
+ kwshrink   revert expanded keywords in working directory
+
+use "hg -v help keyword" to show aliases and global options
+% hg kwdemo
+[extensions]
+hgext.keyword = 
+[keyword]
+* = 
+b = ignore
+demo.txt = 
+[keywordmaps]
+RCSFile = {file|basename},v
+Author = {author|user}
+Header = {root}/{file},v {node|short} {date|utcdate} {author|user}
+Source = {root}/{file},v
+Date = {date|utcdate}
+Id = {file|basename},v {node|short} {date|utcdate} {author|user}
+Revision = {node|short}
+$RCSFile: demo.txt,v $
+$Author: test $
+$Header: /TMP/demo.txt,v xxxxxxxxxxxx 2000/00/00 00:00:00 test $
+$Source: /TMP/demo.txt,v $
+$Date: 2000/00/00 00:00:00 $
+$Id: demo.txt,v xxxxxxxxxxxx 2000/00/00 00:00:00 test $
+$Revision: xxxxxxxxxxxx $
+[extensions]
+hgext.keyword = 
+[keyword]
+* = 
+b = ignore
+demo.txt = 
+[keywordmaps]
+Branch = {branches}
+$Branch: demobranch $
+% kwshrink should exit silently in empty/invalid repo
+% cat
+expand $Id$
+do not process $Id:
+xxx $
+expand $Id$
+do not process $Id:
+xxx $
+ignore $Id$
+% addremove
+adding a
+adding b
+adding sym
+% status
+A a
+A b
+A sym
+% default keyword expansion including commit hook
+% interrupted commit should not change state or run commit hook
+a
+b
+sym
+transaction abort!
+rollback completed
+abort: empty commit message
+% status
+A a
+A b
+A sym
+% commit
+a
+b
+sym
+overwriting a expanding keywords
+running hook commit.test: cp a hooktest
+% status
+? hooktest
+% identify
+f782df5f9602
+% cat
+expand $Id: a,v f782df5f9602 1970/01/01 00:00:00 user $
+do not process $Id:
+xxx $
+expand $Id: a,v f782df5f9602 1970/01/01 00:00:00 user $
+do not process $Id:
+xxx $
+ignore $Id$
+% hg cat
+expand $Id: a,v f782df5f9602 1970/01/01 00:00:00 user $
+do not process $Id:
+xxx $
+ignore $Id$
+a
+% diff a hooktest
+% removing commit hook from config
+% touch
+% status
+% update
+3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% cat
+expand $Id: a,v f782df5f9602 1970/01/01 00:00:00 user $
+do not process $Id:
+xxx $
+expand $Id: a,v f782df5f9602 1970/01/01 00:00:00 user $
+do not process $Id:
+xxx $
+ignore $Id$
+% check whether expansion is filewise
+% commit c
+adding c
+% force expansion
+overwriting a expanding keywords
+overwriting c expanding keywords
+% compare changenodes in a c
+expand $Id: a,v f782df5f9602 1970/01/01 00:00:00 user $
+do not process $Id:
+xxx $
+$Id: c,v ba4426d1938e 1970/01/01 00:00:01 user $
+tests for different changenodes
+% qimport
+% keywords should not be expanded in patch
+# HG changeset patch
+# User User Name <user@example.com>
+# Date 1 0
+# Node ID ba4426d1938ec9673e03ab274d88c44e24618f7f
+# Parent  f782df5f9602483b4e51c31a12315f353bba380c
+cndiff
+
+diff -r f782df5f9602 -r ba4426d1938e c
+--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
++++ b/c	Thu Jan 01 00:00:01 1970 +0000
+@@ -0,0 +1,2 @@
++$Id$
++tests for different changenodes
+% qpop
+Patch queue now empty
+% qgoto - should imply qpush
+applying mqtest.diff
+Now at: mqtest.diff
+% cat
+$Id: c,v ba4426d1938e 1970/01/01 00:00:01 user $
+tests for different changenodes
+% qpop and move on
+Patch queue now empty
+% copy
+% kwfiles added
+a
+c
+% commit
+c
+ c: copy a:0045e12f6c5791aac80ca6cbfd97709a88307292
+overwriting c expanding keywords
+% cat a c
+expand $Id: a,v f782df5f9602 1970/01/01 00:00:00 user $
+do not process $Id:
+xxx $
+expand $Id: c,v 0ba462c0f077 1970/01/01 00:00:01 user $
+do not process $Id:
+xxx $
+% touch copied c after 1 second
+% status
+% kwfiles
+a
+c
+% diff --rev
+diff -r f782df5f9602 c
+--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+@@ -0,0 +1,3 @@
++expand $Id$
++do not process $Id:
++xxx $
+% rollback
+rolling back last transaction
+% status
+A c
+% update -C
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+% custom keyword expansion
+% try with kwdemo
+[extensions]
+hgext.keyword = 
+[keyword]
+* = 
+b = ignore
+demo.txt = 
+[keywordmaps]
+Xinfo = {author}: {desc}
+$Xinfo: test: hg keyword config and expansion example $
+% cat
+expand $Id: a,v f782df5f9602 1970/01/01 00:00:00 user $
+do not process $Id:
+xxx $
+expand $Id: a,v f782df5f9602 1970/01/01 00:00:00 user $
+do not process $Id:
+xxx $
+ignore $Id$
+% hg cat
+expand $Id: a f782df5f9602 Thu, 01 Jan 1970 00:00:00 +0000 user $
+do not process $Id:
+xxx $
+ignore $Id$
+a
+% interrupted commit should not change state
+transaction abort!
+rollback completed
+abort: empty commit message
+% status
+M a
+? log
+% commit
+a
+overwriting a expanding keywords
+% status
+% cat
+expand $Id: a 0729690beff6 Thu, 01 Jan 1970 00:00:02 +0000 user $
+do not process $Id:
+xxx $
+$Xinfo: User Name <user@example.com>: firstline $
+expand $Id: a 0729690beff6 Thu, 01 Jan 1970 00:00:02 +0000 user $
+do not process $Id:
+xxx $
+$Xinfo: User Name <user@example.com>: firstline $
+ignore $Id$
+% hg cat
+expand $Id: a 0729690beff6 Thu, 01 Jan 1970 00:00:02 +0000 user $
+do not process $Id:
+xxx $
+$Xinfo: User Name <user@example.com>: firstline $
+ignore $Id$
+a
+% remove
+% status
+% rollback
+rolling back last transaction
+% status
+R a
+% revert a
+% cat a
+expand $Id: a 0729690beff6 Thu, 01 Jan 1970 00:00:02 +0000 user $
+do not process $Id:
+xxx $
+$Xinfo: User Name <user@example.com>: firstline $
+% clone to test incoming
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 3 changes to 3 files
+3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% incoming
+comparing with test-keyword/Test
+searching for changes
+changeset:   1:0729690beff6
+tag:         tip
+user:        User Name <user@example.com>
+date:        Thu Jan 01 00:00:02 1970 +0000
+summary:     firstline
+
+% commit rejecttest
+a
+overwriting a expanding keywords
+% export
+% import
+applying ../rejecttest.diff
+% cat
+expand $Id: a 82983f13f138 Thu, 01 Jan 1970 00:00:03 +0000 user $ rejecttest
+do not process $Id: rejecttest
+xxx $
+$Xinfo: User Name <user@example.com>: rejects? $
+expand $Id: a 82983f13f138 Thu, 01 Jan 1970 00:00:03 +0000 user $ rejecttest
+do not process $Id: rejecttest
+xxx $
+$Xinfo: User Name <user@example.com>: rejects? $
+ignore $Id$
+
+% rollback
+rolling back last transaction
+% clean update
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% kwexpand/kwshrink on selected files
+% copy a x/a
+% kwexpand a
+overwriting a expanding keywords
+% kwexpand x/a should abort
+abort: outstanding uncommitted changes in given files
+x/a
+ x/a: copy a:779c764182ce5d43e2b1eb66ce06d7b47bfe342e
+overwriting x/a expanding keywords
+% cat a
+expand $Id: x/a f27c134d2d9b Thu, 01 Jan 1970 00:00:03 +0000 user $
+do not process $Id:
+xxx $
+$Xinfo: User Name <user@example.com>: xa $
+% kwshrink a inside directory x
+overwriting x/a shrinking keywords
+% cat a
+expand $Id$
+do not process $Id:
+xxx $
+$Xinfo$
+% kwexpand nonexistent
+nonexistent: No such file or directory
+% switch off expansion
+% kwshrink with unknown file u
+overwriting a shrinking keywords
+overwriting x/a shrinking keywords
+% cat
+expand $Id$
+do not process $Id:
+xxx $
+$Xinfo$
+expand $Id$
+do not process $Id:
+xxx $
+$Xinfo$
+ignore $Id$
+% hg cat
+expand $Id: a 0729690beff6 Thu, 01 Jan 1970 00:00:02 +0000 user $
+do not process $Id:
+xxx $
+$Xinfo: User Name <user@example.com>: firstline $
+ignore $Id$
+a
+% cat
+expand $Id$
+do not process $Id:
+xxx $
+$Xinfo$
+expand $Id$
+do not process $Id:
+xxx $
+$Xinfo$
+ignore $Id$
+% hg cat
+expand $Id$
+do not process $Id:
+xxx $
+$Xinfo$
+ignore $Id$
+a
--- a/tests/test-log	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-log	Wed Feb 06 19:57:52 2008 -0800
@@ -41,6 +41,11 @@
 hg ci -Ame2 -d '6 0'
 hg log -vC --template '{rev} {file_copies%filecopy}\n' -r 5
 
+echo % log copies, execute bit set
+chmod +x e
+hg ci -me3 -d '7 0'
+hg log -vC --template '{rev} {file_copies%filecopy}\n' -r 6
+
 echo '% log -p d'
 hg log -pv d
 
--- a/tests/test-log.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-log.out	Wed Feb 06 19:57:52 2008 -0800
@@ -86,6 +86,8 @@
 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
 adding foo
 5 e (dir/b)
+% log copies, execute bit set
+6 
 % log -p d
 changeset:   3:16b60bf3f99a
 user:        test
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-manifest	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,28 @@
+#!/bin/sh
+
+# Source bundle was generated with the following script.
+#
+# hg init
+# echo a > a
+# ln -s a l
+# hg ci -Ama -d'0 0'
+# mkdir b
+# echo a > b/a
+# chmod +x b/a
+# hg ci -Amb -d'1 0'
+
+hg init
+hg -q pull "$TESTDIR/test-manifest.hg"
+
+hg manifest
+hg manifest -v
+hg manifest --debug
+hg manifest -r 0
+hg manifest -r 1
+hg manifest -r tip
+
+echo % should fail
+hg manifest -r 2
+hg manifest -r tip tip
+
+hg manifest tip
Binary file tests/test-manifest.hg has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-manifest.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,23 @@
+a
+b/a
+l
+644   a
+755 * b/a
+644 @ l
+b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644   a
+b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 755 * b/a
+047b75c6d7a3ef6a2243bd0e99f94f6ea6683597 644 @ l
+a
+l
+a
+b/a
+l
+a
+b/a
+l
+% should fail
+abort: unknown revision '2'!
+abort: please specify just one revision
+a
+b/a
+l
--- a/tests/test-merge-commit.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-merge-commit.out	Wed Feb 06 19:57:52 2008 -0800
@@ -23,9 +23,12 @@
 resolving manifests
  overwrite None partial False
  ancestor 0a3ab4856510 local 2d2f9a22c82b+ remote 7d3b554bfdf1
+  searching for copies back to rev 1
  bar: versions differ -> m
+picked tool 'internal:merge' for bar (binary False symlink False)
 merging bar
 my bar@2d2f9a22c82b+ other bar@7d3b554bfdf1 ancestor bar@0a3ab4856510
+ premerge successful
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 % contents of bar should be line1 line2
@@ -68,9 +71,12 @@
 resolving manifests
  overwrite None partial False
  ancestor 0a3ab4856510 local 2d2f9a22c82b+ remote 96ab80c60897
+  searching for copies back to rev 1
  bar: versions differ -> m
+picked tool 'internal:merge' for bar (binary False symlink False)
 merging bar
 my bar@2d2f9a22c82b+ other bar@96ab80c60897 ancestor bar@0a3ab4856510
+ premerge successful
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 % contents of bar should be line1 line2
--- a/tests/test-merge-default	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-merge-default	Wed Feb 06 19:57:52 2008 -0800
@@ -34,6 +34,10 @@
 hg merge
 hg commit -mm2
 
+echo % should fail because at tip
+hg merge
+
+hg up 0
 echo % should fail because 1 head
 hg merge
 
--- a/tests/test-merge-default.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-merge-default.out	Wed Feb 06 19:57:52 2008 -0800
@@ -13,5 +13,8 @@
 % should succeed - 2 heads
 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
+% should fail because at tip
+abort: there is nothing to merge
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 % should fail because 1 head
 abort: there is nothing to merge - use "hg update" instead
--- a/tests/test-merge-local.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-merge-local.out	Wed Feb 06 19:57:52 2008 -0800
@@ -18,27 +18,28 @@
 M zzz2_merge_bad
 # local merge with bad merge tool
 merging zzz1_merge_ok
-merging zzz1_merge_ok failed!
 merging zzz2_merge_bad
 merging zzz2_merge_bad failed!
-3 files updated, 0 files merged, 2 files removed, 2 files unresolved
+3 files updated, 1 files merged, 2 files removed, 1 files unresolved
 There are unresolved merges with locally modified files.
-You can redo the full merge using:
+You can finish the partial merge using:
   hg update 0
   hg update 1
 2 files updated, 0 files merged, 3 files removed, 0 files unresolved
 --- a/zzz1_merge_ok
 +++ b/zzz1_merge_ok
++new first line
 +new last line
 --- a/zzz2_merge_bad
 +++ b/zzz2_merge_bad
 +another last line
 M zzz1_merge_ok
 M zzz2_merge_bad
+? zzz2_merge_bad.orig
 # local merge with conflicts
-warning: conflicts during merge.
 merging zzz1_merge_ok
 merging zzz2_merge_bad
+warning: conflicts during merge.
 merging zzz2_merge_bad failed!
 3 files updated, 1 files merged, 2 files removed, 1 files unresolved
 There are unresolved merges with locally modified files.
@@ -57,6 +58,7 @@
 +new last line
 M zzz1_merge_ok
 M zzz2_merge_bad
+? zzz2_merge_bad.orig
 # local merge without conflicts
 merging zzz1_merge_ok
 4 files updated, 1 files merged, 2 files removed, 0 files unresolved
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-merge-prompt	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,69 @@
+#!/bin/sh
+#
+# Test for b5605d88dc27
+#  Make ui.prompt repeat on "unrecognized response" again (issue897)
+# and for 840e2b315c1f
+#  Fix misleading error and prompts during update/merge (issue556)
+
+status() {
+    [ $? -ne 0 ] && echo "failed."
+    echo "status:"
+    hg st -A file1 file2
+    for file in file1 file2; do
+        if [ -f $file ]; then
+            echo "$file:"
+            cat $file
+        else
+            echo "$file does not exist"
+        fi
+    done
+}
+
+hg init repo
+cd repo
+echo 1 > file1
+echo 2 > file2
+hg ci -Am 'added file1 and file2' # rev 0
+
+hg rm file1
+echo changed >> file2
+hg ci -m 'removed file1, changed file2' # rev 1
+
+hg co 0
+echo changed >> file1
+hg rm file2
+hg ci -m 'changed file1, removed file2' # rev 2
+
+echo
+echo "# non-interactive merge"
+hg merge -y || echo "failed"
+status
+
+echo
+echo "# interactive merge"
+hg co -C
+hg merge --config ui.interactive=true <<EOF || echo "failed"
+c
+d
+EOF
+status
+
+echo
+echo "# interactive merge with bad input"
+hg co -C
+hg merge --config ui.interactive=true <<EOF || echo "failed"
+foo
+bar
+d
+baz
+c
+EOF
+status
+
+echo
+echo "# interactive merge with not enough input"
+hg co -C
+hg merge --config ui.interactive=true <<EOF || echo "failed"
+d
+EOF
+status
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-merge-prompt.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,64 @@
+adding file1
+adding file2
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+# non-interactive merge
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+status:
+M file2
+C file1
+file1:
+1
+changed
+file2:
+2
+changed
+
+# interactive merge
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ local changed file1 which remote deleted
+use (c)hanged version or (d)elete? remote changed file2 which local deleted
+use (c)hanged version or leave (d)eleted? 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+status:
+file2: No such file or directory
+C file1
+file1:
+1
+changed
+file2 does not exist
+
+# interactive merge with bad input
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ local changed file1 which remote deleted
+use (c)hanged version or (d)elete? unrecognized response
+ local changed file1 which remote deleted
+use (c)hanged version or (d)elete? unrecognized response
+ local changed file1 which remote deleted
+use (c)hanged version or (d)elete? remote changed file2 which local deleted
+use (c)hanged version or leave (d)eleted? unrecognized response
+remote changed file2 which local deleted
+use (c)hanged version or leave (d)eleted? 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+status:
+M file2
+R file1
+file1 does not exist
+file2:
+2
+changed
+
+# interactive merge with not enough input
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ local changed file1 which remote deleted
+use (c)hanged version or (d)elete? remote changed file2 which local deleted
+use (c)hanged version or leave (d)eleted? abort: response expected
+failed
+status:
+file2: No such file or directory
+C file1
+file1:
+1
+changed
+file2 does not exist
--- a/tests/test-merge-revert2.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-merge-revert2.out	Wed Feb 06 19:57:52 2008 -0800
@@ -9,8 +9,8 @@
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 f248da0d4c3e tip
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+merging file1
 warning: conflicts during merge.
-merging file1
 merging file1 failed!
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
 There are unresolved merges with locally modified files.
@@ -20,18 +20,22 @@
 diff -r f248da0d4c3e file1
 --- a/file1
 +++ b/file1
-@@ -1,3 +1,7 @@ added file1
+@@ -1,3 +1,7 @@
  added file1
  another line of text
-+<<<<<<< my
++<<<<<<< local
 +changed file1 different
 +=======
  changed file1
 +>>>>>>> other
 M file1
+? file1.orig
 f248da0d4c3e+ tip
 reverting file1
+? file1.orig
 f248da0d4c3e tip
+? file1.orig
 f248da0d4c3e tip
 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+? file1.orig
 f248da0d4c3e tip
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-merge-symlinks	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,39 @@
+#!/bin/sh
+
+cat > echo.py <<EOF
+#!/usr/bin/env python
+import os
+for k in ('HG_FILE', 'HG_MY_ISLINK', 'HG_OTHER_ISLINK', 'HG_BASE_ISLINK'):
+    print k, os.environ[k]
+EOF
+
+# Create 2 heads containing the same file, once as
+# a file, once as a link. Bundle was generated with:
+#
+# hg init t
+# cd t
+# echo a > a
+# hg ci -qAm t0 -d '0 0'
+# echo l > l
+# hg ci -qAm t1 -d '1 0'
+# hg up -C 0
+# ln -s a l
+# hg ci -qAm t2 -d '2 0'
+# echo l2 > l2
+# hg ci -qAm t3 -d '3 0'
+
+hg init t
+cd t
+hg -q pull "$TESTDIR/test-merge-symlinks.hg"
+hg up -C 3
+
+# Merge them and display *_ISLINK vars
+echo % merge heads
+HGMERGE="python ../echo.py" hg merge
+
+# Test working directory symlink bit calculation wrt copies,
+# especially on non-supporting systems.
+echo % merge working directory
+hg up -C 2
+hg copy l l2
+HGMERGE="python ../echo.py" hg up 3
Binary file tests/test-merge-symlinks.hg has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-merge-symlinks.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,17 @@
+3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% merge heads
+HG_FILE l
+HG_MY_ISLINK 1
+HG_OTHER_ISLINK 0
+HG_BASE_ISLINK 0
+merging l
+0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+% merge working directory
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+HG_FILE l2
+HG_MY_ISLINK 1
+HG_OTHER_ISLINK 0
+HG_BASE_ISLINK 0
+merging l2
+0 files updated, 1 files merged, 0 files removed, 0 files unresolved
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-merge-types	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,39 @@
+#!/bin/sh
+
+hg init
+echo a > a
+hg ci -Amadd       # 0
+
+chmod +x a
+hg ci -mexecutable # 1
+
+hg up 0
+rm a
+ln -s symlink a
+hg ci -msymlink    # 2
+hg merge --debug
+
+echo % symlink is local parent, executable is other
+
+if [ -h a ]; then
+    echo a is a symlink
+    $TESTDIR/readlink.py a
+elif [ -x a ]; then
+    echo a is executable
+else
+    echo "a has no flags (default for conflicts)"
+fi
+
+hg update -C 1
+hg merge --debug
+
+echo % symlink is other parent, executable is local
+
+if [ -h a ]; then
+    echo a is a symlink
+    $TESTDIR/readlink.py a
+elif [ -x a ]; then
+    echo a is executable
+else
+    echo "a has no flags (default for conflicts)"
+fi
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-merge-types.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,22 @@
+adding a
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+resolving manifests
+ overwrite None partial False
+ ancestor c334dc3be0da local 521a1e40188f+ remote 3574f3e69b1c
+  searching for copies back to rev 1
+ a: update permissions -> e
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+% symlink is local parent, executable is other
+a has no flags (default for conflicts)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+resolving manifests
+ overwrite None partial False
+ ancestor c334dc3be0da local 3574f3e69b1c+ remote 521a1e40188f
+  searching for copies back to rev 1
+ a: remote is newer -> g
+getting a
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+% symlink is other parent, executable is local
+a has no flags (default for conflicts)
--- a/tests/test-merge1.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-merge1.out	Wed Feb 06 19:57:52 2008 -0800
@@ -10,7 +10,7 @@
 M b
 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
 %% merge should fail
-abort: untracked local file 'b' differs from remote version
+abort: untracked file in working directory differs from file in requested revision: 'b'
 %% merge of b expected
 merging for b
 merging b
@@ -34,7 +34,7 @@
 diff -r c1dd73cbf59f b
 --- a/b
 +++ b/b
-@@ -1,1 +1,1 @@ This is file b1
+@@ -1,1 +1,1 @@
 -This is file b1
 +This is file b22
 M b
@@ -49,7 +49,7 @@
 diff -r c1dd73cbf59f b
 --- a/b
 +++ b/b
-@@ -1,1 +1,1 @@ This is file b1
+@@ -1,1 +1,1 @@
 -This is file b1
 +This is file b33
 M b
--- a/tests/test-merge10.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-merge10.out	Wed Feb 06 19:57:52 2008 -0800
@@ -14,6 +14,6 @@
 diff -r d02b3fc32762 testdir/subdir/a
 --- a/testdir/subdir/a
 +++ b/testdir/subdir/a
-@@ -1,1 +1,1 @@ a
+@@ -1,1 +1,1 @@
 -a
 +alpha
--- a/tests/test-merge6.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-merge6.out	Wed Feb 06 19:57:52 2008 -0800
@@ -11,7 +11,7 @@
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 bar should remain deleted.
-f9b0e817f6a48de3564c6b2957687c5e7297c5a0 644 foo
+f9b0e817f6a48de3564c6b2957687c5e7297c5a0 644   foo
 pulling from ../A2
 searching for changes
 adding changesets
@@ -22,4 +22,4 @@
 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 bar should remain deleted.
-f9b0e817f6a48de3564c6b2957687c5e7297c5a0 644 foo
+f9b0e817f6a48de3564c6b2957687c5e7297c5a0 644   foo
--- a/tests/test-merge7.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-merge7.out	Wed Feb 06 19:57:52 2008 -0800
@@ -6,8 +6,8 @@
 adding file changes
 added 1 changesets with 1 changes to 1 files (+1 heads)
 (run 'hg heads' to see heads, 'hg merge' to merge)
+merging test.txt
 warning: conflicts during merge.
-merging test.txt
 merging test.txt failed!
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
 There are unresolved merges, you can redo the full merge using:
@@ -20,20 +20,22 @@
 adding file changes
 added 1 changesets with 1 changes to 1 files (+1 heads)
 (run 'hg heads' to see heads, 'hg merge' to merge)
-warning: conflicts during merge.
 resolving manifests
  overwrite None partial False
  ancestor faaea63e63a9 local 451c744aabcc+ remote a070d41e8360
+  searching for copies back to rev 1
  test.txt: versions differ -> m
+picked tool 'internal:merge' for test.txt (binary False symlink False)
 merging test.txt
 my test.txt@451c744aabcc+ other test.txt@a070d41e8360 ancestor test.txt@faaea63e63a9
+warning: conflicts during merge.
 merging test.txt failed!
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
 There are unresolved merges, you can redo the full merge using:
   hg update -C 3
   hg merge 4
 one
-<<<<<<< my
+<<<<<<< local
 two-point-five
 =======
 two-point-one
--- a/tests/test-merge9.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-merge9.out	Wed Feb 06 19:57:52 2008 -0800
@@ -5,8 +5,7 @@
 merging bar
 merging bar failed!
 merging foo and baz
-merging foo failed!
-1 files updated, 0 files merged, 0 files removed, 2 files unresolved
+1 files updated, 1 files merged, 0 files removed, 1 files unresolved
 There are unresolved merges, you can redo the full merge using:
   hg update -C 2
   hg merge 1
@@ -14,8 +13,7 @@
 merging bar
 merging bar failed!
 merging baz and foo
-merging baz failed!
-1 files updated, 0 files merged, 0 files removed, 2 files unresolved
+1 files updated, 1 files merged, 0 files removed, 1 files unresolved
 There are unresolved merges, you can redo the full merge using:
   hg update -C 1
   hg merge 2
--- a/tests/test-mq	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-mq	Wed Feb 06 19:57:52 2008 -0800
@@ -1,5 +1,12 @@
 #!/bin/sh
 
+checkundo()
+{
+    if [ -f .hg/store/undo ]; then
+	echo ".hg/store/undo still exists after $1"
+    fi
+}
+
 echo "[extensions]" >> $HGRCPATH
 echo "mq=" >> $HGRCPATH
 
@@ -35,6 +42,12 @@
 hg --cwd c qinit -c
 hg -R c/.hg/patches st
 
+echo % qnew should refuse bad patch names
+hg -R c qnew series
+hg -R c qnew status
+hg -R c qnew guards
+hg -R c qnew .hgignore
+
 echo % qnew implies add
 
 hg -R c qnew test.patch
@@ -57,6 +70,7 @@
 hg init e
 cd e
 hg qnew A
+checkundo qnew
 echo foo > foo
 hg add foo
 hg qrefresh
@@ -76,6 +90,27 @@
 
 cd a
 
+echo a > somefile
+hg add somefile
+
+echo % qnew with uncommitted changes
+
+hg qnew uncommitted.patch
+hg st
+hg qseries
+
+echo '% qnew with uncommitted changes and missing file (issue 803)'
+
+hg qnew issue803.patch someotherfile 2>&1 | \
+    sed -e 's/someotherfile:.*/someotherfile: No such file or directory/'
+hg st
+hg qseries
+hg qpop -f
+hg qdel issue803.patch
+
+hg revert --no-backup somefile
+rm somefile
+
 echo % qnew -m
 
 hg qnew -m 'foo bar' test.patch
@@ -100,14 +135,17 @@
 hg diff --nodates -q
 # restore things
 hg qrefresh
+checkundo qrefresh
 
 echo % qpop
 
 hg qpop
+checkundo qpop
 
 echo % qpush
 
 hg qpush
+checkundo qpush
 
 cd ..
 
@@ -265,6 +303,13 @@
 echo % mq tags
 hg log --template '{rev} {tags}\n' -r qparent:qtip
 
+echo % bad node in status
+hg qpop
+hg strip -qn tip
+hg tip 2>&1 | sed -e 's/unknown node .*/unknown node/'
+hg branches 2>&1 | sed -e 's/unknown node .*/unknown node/'
+hg qpop
+
 cat >>$HGRCPATH <<EOF
 [diff]
 git = True
@@ -338,6 +383,46 @@
 cat .hg/patches/bar
 hg log -vC --template '{rev} {file_copies%filecopy}\n' -r .
 
+echo % refresh omitting an added file
+hg qnew baz
+echo newfile > newfile
+hg add newfile
+hg qrefresh
+hg st -A newfile
+hg qrefresh -X newfile
+hg st -A newfile
+hg revert newfile
+rm newfile
+hg qpop
+hg qdel baz
+
+echo % create a git patch
+echo a > alexander
+hg add alexander
+hg qnew -f --git addalexander
+grep diff .hg/patches/addalexander
+
+echo % create a git binary patch
+cat > writebin.py <<EOF
+import sys
+path = sys.argv[1]
+open(path, 'wb').write('BIN\x00ARY')
+EOF
+python writebin.py bucephalus
+
+python "$TESTDIR/md5sum.py" bucephalus
+hg add bucephalus
+hg qnew -f --git addbucephalus
+grep diff .hg/patches/addbucephalus
+
+echo % check binary patches can be popped and pushed
+hg qpop
+test -f bucephalus && echo % bucephalus should not be there
+hg qpush
+test -f bucephalus || echo % bucephalus should be there
+python "$TESTDIR/md5sum.py" bucephalus
+
+
 echo '% strip again'
 cd ..
 hg init strip
@@ -354,6 +439,7 @@
 hg ci -m merge -d '0 0'
 hg log
 hg strip 1 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+checkundo strip
 hg log
 cd ..
 
--- a/tests/test-mq-guards	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-mq-guards	Wed Feb 06 19:57:52 2008 -0800
@@ -126,12 +126,12 @@
 hg qseries -v
 
 qappunappv()
-(
+{
     for command in qapplied "qapplied -v" qunapplied "qunapplied -v"; do
         echo % hg $command
         hg $command
     done
-)
+}
 
 hg qpop -a
 hg qguard -l
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-mq-header-date	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,177 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "mq=" >> $HGRCPATH
+echo "[diff]" >> $HGRCPATH
+echo "nodates=true" >> $HGRCPATH
+
+
+catpatch() {
+    cat .hg/patches/$1.patch | sed -e "s/^diff \-r [0-9a-f]* /diff -r ... /"
+}
+
+catlog() {
+    catpatch $1
+    hg log --template "{rev}: {desc} - {author}\n"
+}
+
+catlogd() {
+    catpatch $1
+    hg log --template "{rev}: {desc} - {author} - {date}\n"
+}
+
+drop() {
+    hg qpop
+    hg qdel $1.patch
+}
+
+
+echo ==== init
+hg init a
+cd a
+hg qinit
+
+
+echo ==== qnew -d
+hg qnew -d '3 0' 1.patch
+catlogd 1
+
+echo ==== qref
+echo "1" >1
+hg add
+hg qref
+catlogd 1
+
+echo ==== qref -d
+hg qref -d '4 0'
+catlogd 1
+
+
+echo ==== qnew
+hg qnew 2.patch
+echo "2" >2
+hg add
+hg qref
+catlog 2
+
+echo ==== qref -d
+hg qref -d '5 0'
+catlog 2
+
+drop 2
+
+
+echo ==== qnew -d -m
+hg qnew -d '6 0' -m "Three" 3.patch
+catlogd 3
+
+echo ==== qref
+echo "3" >3
+hg add
+hg qref
+catlogd 3
+
+echo ==== qref -m
+hg qref -m "Drei"
+catlogd 3
+
+echo ==== qref -d
+hg qref -d '7 0'
+catlogd 3
+
+echo ==== qref -d -m
+hg qref -d '8 0' -m "Three (again)"
+catlogd 3
+
+
+echo ==== qnew -m
+hg qnew -m "Four" 4.patch
+echo "4" >4
+hg add
+hg qref
+catlog 4
+
+echo ==== qref -d
+hg qref -d '9 0'
+catlog 4
+
+drop 4
+
+
+echo ==== qnew with HG header
+hg qnew 5.patch
+hg qpop
+echo "# HG changeset patch" >>.hg/patches/5.patch
+echo "# Date 10 0" >>.hg/patches/5.patch
+hg qpush 2>&1 | grep 'Now at'
+catlogd 5
+
+echo ==== hg qref
+echo "5" >5
+hg add
+hg qref
+catlogd 5
+
+echo ==== hg qref -d
+hg qref -d '11 0'
+catlogd 5
+
+
+echo ==== qnew -u
+hg qnew -u jane 6.patch
+echo "6" >6
+hg add
+hg qref
+catlog 6
+
+echo ==== qref -d
+hg qref -d '12 0'
+catlog 6
+
+drop 6
+
+
+echo ==== qnew -d
+hg qnew -d '13 0' 7.patch
+echo "7" >7
+hg add
+hg qref
+catlog 7
+
+echo ==== qref -u
+hg qref -u john
+catlogd 7
+
+
+echo ==== qnew
+hg qnew 8.patch
+echo "8" >8
+hg add
+hg qref
+catlog 8
+
+echo ==== qref -u -d
+hg qref -u john -d '14 0'
+catlog 8
+
+drop 8
+
+
+echo ==== qnew -m
+hg qnew -m "Nine" 9.patch
+echo "9" >9
+hg add
+hg qref
+catlog 9
+
+echo ==== qref -u -d
+hg qref -u john -d '15 0'
+catlog 9
+
+drop 9
+
+
+echo ==== "qpop -a / qpush -a"
+hg qpop -a
+hg qpush -a
+hg log --template "{rev}: {desc} - {author} - {date}\n"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-mq-header-date.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,284 @@
+==== init
+==== qnew -d
+# HG changeset patch
+# Date 3 0
+
+0: [mq]: 1.patch - test - 3.00
+==== qref
+adding 1
+# HG changeset patch
+# Date 3 0
+
+diff -r ... 1
+--- /dev/null
++++ b/1
+@@ -0,0 +1,1 @@
++1
+0: [mq]: 1.patch - test - 3.00
+==== qref -d
+# HG changeset patch
+# Date 4 0
+
+diff -r ... 1
+--- /dev/null
++++ b/1
+@@ -0,0 +1,1 @@
++1
+0: [mq]: 1.patch - test - 4.00
+==== qnew
+adding 2
+diff -r ... 2
+--- /dev/null
++++ b/2
+@@ -0,0 +1,1 @@
++2
+1: [mq]: 2.patch - test
+0: [mq]: 1.patch - test
+==== qref -d
+diff -r ... 2
+--- /dev/null
++++ b/2
+@@ -0,0 +1,1 @@
++2
+1: [mq]: 2.patch - test
+0: [mq]: 1.patch - test
+Now at: 1.patch
+==== qnew -d -m
+# HG changeset patch
+# Date 6 0
+
+Three
+1: Three - test - 6.00
+0: [mq]: 1.patch - test - 4.00
+==== qref
+adding 3
+# HG changeset patch
+# Date 6 0
+
+Three
+
+diff -r ... 3
+--- /dev/null
++++ b/3
+@@ -0,0 +1,1 @@
++3
+1: Three - test - 6.00
+0: [mq]: 1.patch - test - 4.00
+==== qref -m
+# HG changeset patch
+# Date 6 0
+
+Drei
+
+diff -r ... 3
+--- /dev/null
++++ b/3
+@@ -0,0 +1,1 @@
++3
+1: Drei - test - 6.00
+0: [mq]: 1.patch - test - 4.00
+==== qref -d
+# HG changeset patch
+# Date 7 0
+
+Drei
+
+diff -r ... 3
+--- /dev/null
++++ b/3
+@@ -0,0 +1,1 @@
++3
+1: Drei - test - 7.00
+0: [mq]: 1.patch - test - 4.00
+==== qref -d -m
+# HG changeset patch
+# Date 8 0
+
+Three (again)
+
+diff -r ... 3
+--- /dev/null
++++ b/3
+@@ -0,0 +1,1 @@
++3
+1: Three (again) - test - 8.00
+0: [mq]: 1.patch - test - 4.00
+==== qnew -m
+adding 4
+Four
+
+diff -r ... 4
+--- /dev/null
++++ b/4
+@@ -0,0 +1,1 @@
++4
+2: Four - test
+1: Three (again) - test
+0: [mq]: 1.patch - test
+==== qref -d
+Four
+
+diff -r ... 4
+--- /dev/null
++++ b/4
+@@ -0,0 +1,1 @@
++4
+2: Four - test
+1: Three (again) - test
+0: [mq]: 1.patch - test
+Now at: 3.patch
+==== qnew with HG header
+Now at: 3.patch
+Now at: 5.patch
+# HG changeset patch
+# Date 10 0
+2: imported patch 5.patch - test - 10.00
+1: Three (again) - test - 8.00
+0: [mq]: 1.patch - test - 4.00
+==== hg qref
+adding 5
+# HG changeset patch
+# Date 10 0
+
+diff -r ... 5
+--- /dev/null
++++ b/5
+@@ -0,0 +1,1 @@
++5
+2: [mq]: 5.patch - test - 10.00
+1: Three (again) - test - 8.00
+0: [mq]: 1.patch - test - 4.00
+==== hg qref -d
+# HG changeset patch
+# Date 11 0
+
+diff -r ... 5
+--- /dev/null
++++ b/5
+@@ -0,0 +1,1 @@
++5
+2: [mq]: 5.patch - test - 11.00
+1: Three (again) - test - 8.00
+0: [mq]: 1.patch - test - 4.00
+==== qnew -u
+adding 6
+From: jane
+
+diff -r ... 6
+--- /dev/null
++++ b/6
+@@ -0,0 +1,1 @@
++6
+3: [mq]: 6.patch - jane
+2: [mq]: 5.patch - test
+1: Three (again) - test
+0: [mq]: 1.patch - test
+==== qref -d
+From: jane
+
+diff -r ... 6
+--- /dev/null
++++ b/6
+@@ -0,0 +1,1 @@
++6
+3: [mq]: 6.patch - jane
+2: [mq]: 5.patch - test
+1: Three (again) - test
+0: [mq]: 1.patch - test
+Now at: 5.patch
+==== qnew -d
+adding 7
+# HG changeset patch
+# Date 13 0
+
+diff -r ... 7
+--- /dev/null
++++ b/7
+@@ -0,0 +1,1 @@
++7
+3: [mq]: 7.patch - test
+2: [mq]: 5.patch - test
+1: Three (again) - test
+0: [mq]: 1.patch - test
+==== qref -u
+# HG changeset patch
+# User john
+# Date 13 0
+
+diff -r ... 7
+--- /dev/null
++++ b/7
+@@ -0,0 +1,1 @@
++7
+3: [mq]: 7.patch - john - 13.00
+2: [mq]: 5.patch - test - 11.00
+1: Three (again) - test - 8.00
+0: [mq]: 1.patch - test - 4.00
+==== qnew
+adding 8
+diff -r ... 8
+--- /dev/null
++++ b/8
+@@ -0,0 +1,1 @@
++8
+4: [mq]: 8.patch - test
+3: [mq]: 7.patch - john
+2: [mq]: 5.patch - test
+1: Three (again) - test
+0: [mq]: 1.patch - test
+==== qref -u -d
+From: john
+
+
+diff -r ... 8
+--- /dev/null
++++ b/8
+@@ -0,0 +1,1 @@
++8
+4: [mq]: 8.patch - john
+3: [mq]: 7.patch - john
+2: [mq]: 5.patch - test
+1: Three (again) - test
+0: [mq]: 1.patch - test
+Now at: 7.patch
+==== qnew -m
+adding 9
+Nine
+
+diff -r ... 9
+--- /dev/null
++++ b/9
+@@ -0,0 +1,1 @@
++9
+4: Nine - test
+3: [mq]: 7.patch - john
+2: [mq]: 5.patch - test
+1: Three (again) - test
+0: [mq]: 1.patch - test
+==== qref -u -d
+From: john
+
+Nine
+
+diff -r ... 9
+--- /dev/null
++++ b/9
+@@ -0,0 +1,1 @@
++9
+4: Nine - john
+3: [mq]: 7.patch - john
+2: [mq]: 5.patch - test
+1: Three (again) - test
+0: [mq]: 1.patch - test
+Now at: 7.patch
+==== qpop -a / qpush -a
+Patch queue now empty
+applying 1.patch
+applying 3.patch
+applying 5.patch
+applying 7.patch
+Now at: 7.patch
+3: imported patch 7.patch - john - 13.00
+2: imported patch 5.patch - test - 11.00
+1: Three (again) - test - 8.00
+0: imported patch 1.patch - test - 4.00
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-mq-header-from	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,107 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "mq=" >> $HGRCPATH
+echo "[diff]" >> $HGRCPATH
+echo "nodates=true" >> $HGRCPATH
+
+
+catlog() {
+    cat .hg/patches/$1.patch | sed -e "s/^diff \-r [0-9a-f]* /diff -r ... /"
+    hg log --template "{rev}: {desc} - {author}\n"
+}
+
+
+echo ==== init
+hg init a
+cd a
+hg qinit
+
+
+echo ==== qnew -U
+hg qnew -U 1.patch
+catlog 1
+
+echo ==== qref
+echo "1" >1
+hg add
+hg qref
+catlog 1
+
+echo ==== qref -u
+hg qref -u mary
+catlog 1
+
+echo ==== qnew
+hg qnew 2.patch
+echo "2" >2
+hg add
+hg qref
+catlog 2
+
+echo ==== qref -u
+hg qref -u jane
+catlog 2
+
+
+echo ==== qnew -U -m
+hg qnew -U -m "Three" 3.patch
+catlog 3
+
+echo ==== qref
+echo "3" >3
+hg add
+hg qref
+catlog 3
+
+echo ==== qref -m
+hg qref -m "Drei"
+catlog 3
+
+echo ==== qref -u
+hg qref -u mary
+catlog 3
+
+echo ==== qref -u -m
+hg qref -u maria -m "Three (again)"
+catlog 3
+
+echo ==== qnew -m
+hg qnew -m "Four" 4.patch
+echo "4" >4
+hg add
+hg qref
+catlog 4
+
+echo ==== qref -u
+hg qref -u jane
+catlog 4
+
+
+echo ==== qnew with HG header
+hg qnew 5.patch
+hg qpop
+echo "# HG changeset patch" >>.hg/patches/5.patch
+echo "# User johndoe" >>.hg/patches/5.patch
+hg qpush 2>&1 | grep 'Now at'
+catlog 5
+
+echo ==== hg qref
+echo "5" >5
+hg add
+hg qref
+catlog 5
+
+echo ==== hg qref -U
+hg qref -U
+catlog 5
+
+echo ==== hg qref -u
+hg qref -u johndeere
+catlog 5
+
+
+echo ==== "qpop -a / qpush -a"
+hg qpop -a
+hg qpush -a
+hg log --template "{rev}: {desc} - {author}\n"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-mq-header-from.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,197 @@
+==== init
+==== qnew -U
+From: test
+
+0: [mq]: 1.patch - test
+==== qref
+adding 1
+From: test
+
+diff -r ... 1
+--- /dev/null
++++ b/1
+@@ -0,0 +1,1 @@
++1
+0: [mq]: 1.patch - test
+==== qref -u
+From: mary
+
+diff -r ... 1
+--- /dev/null
++++ b/1
+@@ -0,0 +1,1 @@
++1
+0: [mq]: 1.patch - mary
+==== qnew
+adding 2
+diff -r ... 2
+--- /dev/null
++++ b/2
+@@ -0,0 +1,1 @@
++2
+1: [mq]: 2.patch - test
+0: [mq]: 1.patch - mary
+==== qref -u
+From: jane
+
+
+diff -r ... 2
+--- /dev/null
++++ b/2
+@@ -0,0 +1,1 @@
++2
+1: [mq]: 2.patch - jane
+0: [mq]: 1.patch - mary
+==== qnew -U -m
+From: test
+
+Three
+2: Three - test
+1: [mq]: 2.patch - jane
+0: [mq]: 1.patch - mary
+==== qref
+adding 3
+From: test
+
+Three
+
+diff -r ... 3
+--- /dev/null
++++ b/3
+@@ -0,0 +1,1 @@
++3
+2: Three - test
+1: [mq]: 2.patch - jane
+0: [mq]: 1.patch - mary
+==== qref -m
+From: test
+
+Drei
+
+diff -r ... 3
+--- /dev/null
++++ b/3
+@@ -0,0 +1,1 @@
++3
+2: Drei - test
+1: [mq]: 2.patch - jane
+0: [mq]: 1.patch - mary
+==== qref -u
+From: mary
+
+Drei
+
+diff -r ... 3
+--- /dev/null
++++ b/3
+@@ -0,0 +1,1 @@
++3
+2: Drei - mary
+1: [mq]: 2.patch - jane
+0: [mq]: 1.patch - mary
+==== qref -u -m
+From: maria
+
+Three (again)
+
+diff -r ... 3
+--- /dev/null
++++ b/3
+@@ -0,0 +1,1 @@
++3
+2: Three (again) - maria
+1: [mq]: 2.patch - jane
+0: [mq]: 1.patch - mary
+==== qnew -m
+adding 4
+Four
+
+diff -r ... 4
+--- /dev/null
++++ b/4
+@@ -0,0 +1,1 @@
++4
+3: Four - test
+2: Three (again) - maria
+1: [mq]: 2.patch - jane
+0: [mq]: 1.patch - mary
+==== qref -u
+From: jane
+
+Four
+
+diff -r ... 4
+--- /dev/null
++++ b/4
+@@ -0,0 +1,1 @@
++4
+3: Four - jane
+2: Three (again) - maria
+1: [mq]: 2.patch - jane
+0: [mq]: 1.patch - mary
+==== qnew with HG header
+Now at: 4.patch
+Now at: 5.patch
+# HG changeset patch
+# User johndoe
+4: imported patch 5.patch - johndoe
+3: Four - jane
+2: Three (again) - maria
+1: [mq]: 2.patch - jane
+0: [mq]: 1.patch - mary
+==== hg qref
+adding 5
+# HG changeset patch
+# User johndoe
+
+diff -r ... 5
+--- /dev/null
++++ b/5
+@@ -0,0 +1,1 @@
++5
+4: [mq]: 5.patch - johndoe
+3: Four - jane
+2: Three (again) - maria
+1: [mq]: 2.patch - jane
+0: [mq]: 1.patch - mary
+==== hg qref -U
+# HG changeset patch
+# User test
+
+diff -r ... 5
+--- /dev/null
++++ b/5
+@@ -0,0 +1,1 @@
++5
+4: [mq]: 5.patch - test
+3: Four - jane
+2: Three (again) - maria
+1: [mq]: 2.patch - jane
+0: [mq]: 1.patch - mary
+==== hg qref -u
+# HG changeset patch
+# User johndeere
+
+diff -r ... 5
+--- /dev/null
++++ b/5
+@@ -0,0 +1,1 @@
++5
+4: [mq]: 5.patch - johndeere
+3: Four - jane
+2: Three (again) - maria
+1: [mq]: 2.patch - jane
+0: [mq]: 1.patch - mary
+==== qpop -a / qpush -a
+Patch queue now empty
+applying 1.patch
+applying 2.patch
+applying 3.patch
+applying 4.patch
+applying 5.patch
+Now at: 5.patch
+4: imported patch 5.patch - johndeere
+3: Four - jane
+2: Three (again) - maria
+1: imported patch 2.patch - jane
+0: imported patch 1.patch - mary
--- a/tests/test-mq-merge	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-mq-merge	Wed Feb 06 19:57:52 2008 -0800
@@ -7,6 +7,13 @@
     sed -e 's:\\:/:g' -e 's:[^ ]*/t/::g'
 }
 
+checkundo()
+{
+    if [ -f .hg/store/undo ]; then
+	echo ".hg/store/undo still exists after $1"
+    fi
+}
+
 echo "[extensions]" >> $HGRCPATH
 echo "hgext.mq=" >> $HGRCPATH
 
@@ -25,6 +32,7 @@
 
 # Save the patch queue so we can merge it later
 hg qsave -c -e 2>&1 | rewrite_path
+checkundo qsave
 
 # Update b and commit in an "update" changeset
 hg up -C init
@@ -36,6 +44,7 @@
 # The system cannot find the file specified => a
 hg manifest
 hg qpush -a -m 2>&1 | rewrite_path
+checkundo 'qpush -m'
 hg manifest
 
 # ensure status is correct after merge
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-mq-missingfiles	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,73 @@
+#!/bin/sh
+
+# Test issue835:
+# qpush fails immediately when patching a missing file, but
+# remaining added files are still created empty which will
+# trick a future qrefresh.
+
+cat > writelines.py <<EOF
+import sys
+path = sys.argv[1]
+args = sys.argv[2:]
+assert (len(args) % 2) == 0
+
+f = file(path, 'wb')
+for i in xrange(len(args)/2):
+   count, s = args[2*i:2*i+2]
+   count = int(count)
+   s = s.decode('string_escape')
+   f.write(s*count)
+f.close()
+
+EOF
+
+echo "[extensions]" >> $HGRCPATH
+echo "mq=" >> $HGRCPATH
+
+hg init normal
+cd normal
+python ../writelines.py b 10 'a\n'
+hg ci -Am addb
+echo a > a
+python ../writelines.py b 2 'b\n' 10 'a\n' 2 'c\n'
+echo c > c
+hg add a c
+hg qnew -f changeb
+hg qpop
+hg rm b
+hg ci -Am rmb
+echo % push patch with missing target
+hg qpush
+echo % display added files
+cat a
+cat c
+echo % display rejections
+cat b.rej
+cd ..
+
+
+echo "[diff]" >> $HGRCPATH
+echo "git=1" >> $HGRCPATH
+
+hg init git
+cd git
+python ../writelines.py b 1 '\x00'
+hg ci -Am addb
+echo a > a
+python ../writelines.py b 1 '\x01' 1 '\x00'
+echo c > c
+hg add a c
+hg qnew -f changeb
+hg qpop
+hg rm b
+hg ci -Am rmb
+echo % push git patch with missing target
+hg qpush 2>&1 | sed -e 's/b:.*/b: No such file or directory/'
+hg st
+echo % display added files
+cat a
+cat c
+echo % display rejections
+cat b.rej
+cd ..
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-mq-missingfiles.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,49 @@
+adding b
+Patch queue now empty
+% push patch with missing target
+applying changeb
+unable to find 'b' for patching
+2 out of 2 hunks FAILED -- saving rejects to file b.rej
+patch failed, unable to continue (try -v)
+patch failed, rejects left in working dir
+Errors during apply, please fix and refresh changeb
+% display added files
+a
+c
+% display rejections
+--- b
++++ b
+@@ -1,3 +1,5 @@
++b
++b
+ a
+ a
+ a
+@@ -8,3 +10,5 @@
+ a
+ a
+ a
++c
++c
+adding b
+Patch queue now empty
+% push git patch with missing target
+applying changeb
+unable to find 'b' for patching
+1 out of 1 hunk FAILED -- saving rejects to file b.rej
+patch failed, unable to continue (try -v)
+b: No such file or directory
+b not tracked!
+patch failed, rejects left in working dir
+Errors during apply, please fix and refresh changeb
+? b.rej
+% display added files
+a
+c
+% display rejections
+--- b
++++ b
+GIT binary patch
+literal 2
+Jc${No0000400IC2
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-mq-pull-from-bundle	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,86 @@
+#!/bin/sh
+
+cat <<EOF >> $HGRCPATH
+[extensions]
+mq=
+[defaults]
+log = --template "{rev}: {desc}\\n"
+heads = --template "{rev}: {desc}\\n"
+incoming = --template "{rev}: {desc}\\n"
+EOF
+
+echo "====== .hgrc"
+cat $HGRCPATH
+
+echo "====== Setup main"
+hg init base
+cd base
+echo "One" > one
+hg add
+hg ci -m "main: one added."
+echo "++" >> one
+hg ci -m "main: one updated."
+
+echo "====== Bundle main"
+hg bundle --base=null ../main.hg
+cd ..
+
+echo "====== Incoming to fresh repo"
+hg init fresh
+echo ">> hg -R fresh incoming main.hg"
+hg -R fresh incoming main.hg
+echo ">> hg -R fresh incoming bundle:fresh+main.hg"
+hg -R fresh incoming bundle:fresh+main.hg
+
+
+echo "====== Setup queue"
+cd base
+hg qinit -c
+hg qnew -m "patch: two added." two.patch
+echo two > two
+hg add
+hg qrefresh
+hg qcommit -m "queue: two.patch added."
+hg qpop -a
+
+echo "====== Bundle queue"
+hg -R .hg/patches bundle --base=null ../queue.hgq
+cd ..
+
+
+echo "====== Clone base"
+hg clone base copy
+cd copy
+hg qinit -c
+
+echo "====== Incoming queue bundle"
+echo ">> hg -R .hg/patches incoming ../queue.hgq"
+hg -R .hg/patches incoming ../queue.hgq
+
+echo "====== Pull queue bundle"
+echo ">> hg -R .hg/patches pull --update ../queue.hgq"
+hg -R .hg/patches pull --update ../queue.hgq
+echo ">> hg -R .hg/patches heads"
+hg -R .hg/patches heads
+echo ">> hg -R .hg/patches log"
+hg -R .hg/patches log
+echo ">> hg qseries"
+hg qseries
+cd ..
+
+
+echo "====== Clone base again"
+hg clone base copy2
+cd copy2
+hg qinit -c
+
+echo "====== Unbundle queue bundle"
+echo ">> hg -R .hg/patches unbundle --update ../queue.hgq"
+hg -R .hg/patches unbundle --update ../queue.hgq
+echo ">> hg -R .hg/patches heads"
+hg -R .hg/patches heads
+echo ">> hg -R .hg/patches log"
+hg -R .hg/patches log
+echo ">> hg qseries"
+hg qseries
+cd ..
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-mq-pull-from-bundle.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,70 @@
+====== .hgrc
+[ui]
+slash = True
+[defaults]
+backout = -d "0 0"
+commit = -d "0 0"
+debugrawcommit = -d "0 0"
+tag = -d "0 0"
+[extensions]
+mq=
+[defaults]
+log = --template "{rev}: {desc}\n"
+heads = --template "{rev}: {desc}\n"
+incoming = --template "{rev}: {desc}\n"
+====== Setup main
+adding one
+====== Bundle main
+2 changesets found
+====== Incoming to fresh repo
+>> hg -R fresh incoming main.hg
+comparing with main.hg
+0: main: one added.
+1: main: one updated.
+>> hg -R fresh incoming bundle:fresh+main.hg
+comparing with bundle:fresh+main.hg
+0: main: one added.
+1: main: one updated.
+====== Setup queue
+adding two
+Patch queue now empty
+====== Bundle queue
+1 changesets found
+====== Clone base
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+====== Incoming queue bundle
+>> hg -R .hg/patches incoming ../queue.hgq
+comparing with ../queue.hgq
+0: queue: two.patch added.
+====== Pull queue bundle
+>> hg -R .hg/patches pull --update ../queue.hgq
+pulling from ../queue.hgq
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 3 changes to 3 files
+merging series
+2 files updated, 1 files merged, 0 files removed, 0 files unresolved
+>> hg -R .hg/patches heads
+0: queue: two.patch added.
+>> hg -R .hg/patches log
+0: queue: two.patch added.
+>> hg qseries
+two.patch
+====== Clone base again
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+====== Unbundle queue bundle
+>> hg -R .hg/patches unbundle --update ../queue.hgq
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 3 changes to 3 files
+merging series
+2 files updated, 1 files merged, 0 files removed, 0 files unresolved
+>> hg -R .hg/patches heads
+0: queue: two.patch added.
+>> hg -R .hg/patches log
+0: queue: two.patch added.
+>> hg qseries
+two.patch
--- a/tests/test-mq-qdiff.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-mq-qdiff.out	Wed Feb 06 19:57:52 2008 -0800
@@ -7,13 +7,13 @@
 diff -r 67e992f2c4f3 base
 --- a/base
 +++ b/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 % qdiff dirname
 diff -r 67e992f2c4f3 base
 --- a/base
 +++ b/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
--- a/tests/test-mq-qrefresh-replace-log-message	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-mq-qrefresh-replace-log-message	Wed Feb 06 19:57:52 2008 -0800
@@ -8,6 +8,11 @@
 hg init
 hg qinit
 
+echo =======================
+echo "Should fail if no patches applied"
+hg qrefresh
+hg qrefresh -e
+
 hg qnew -m "First commit message" first-patch
 echo aaaa > file
 hg add file
--- a/tests/test-mq-qrefresh-replace-log-message.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-mq-qrefresh-replace-log-message.out	Wed Feb 06 19:57:52 2008 -0800
@@ -1,3 +1,7 @@
+=======================
+Should fail if no patches applied
+No patches applied
+No patches applied
 =======================
 Should display 'First commit message'
 description:
--- a/tests/test-mq-qrefresh.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-mq-qrefresh.out	Wed Feb 06 19:57:52 2008 -0800
@@ -8,26 +8,26 @@
 diff -r b55ecdccb5cf 1/base
 --- a/1/base
 +++ b/1/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 diff -r b55ecdccb5cf 2/base
 --- a/2/base
 +++ b/2/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 % qdiff dirname
 diff -r b55ecdccb5cf 1/base
 --- a/1/base
 +++ b/1/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 diff -r b55ecdccb5cf 2/base
 --- a/2/base
 +++ b/2/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 % patch file contents
@@ -36,13 +36,13 @@
 diff -r b55ecdccb5cf 1/base
 --- a/1/base
 +++ b/1/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 diff -r b55ecdccb5cf 2/base
 --- a/2/base
 +++ b/2/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 % qrefresh 1
@@ -50,26 +50,26 @@
 diff -r b55ecdccb5cf 1/base
 --- a/1/base
 +++ b/1/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 diff -r b55ecdccb5cf 2/base
 --- a/2/base
 +++ b/2/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 % qdiff dirname
 diff -r b55ecdccb5cf 1/base
 --- a/1/base
 +++ b/1/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 diff -r b55ecdccb5cf 2/base
 --- a/2/base
 +++ b/2/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 % patch file contents
@@ -78,7 +78,7 @@
 diff -r b55ecdccb5cf 1/base
 --- a/1/base
 +++ b/1/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 % qrefresh . in subdir
@@ -86,26 +86,26 @@
 diff -r b55ecdccb5cf 1/base
 --- a/1/base
 +++ b/1/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 diff -r b55ecdccb5cf 2/base
 --- a/2/base
 +++ b/2/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 % qdiff dirname
 diff -r b55ecdccb5cf 1/base
 --- a/1/base
 +++ b/1/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 diff -r b55ecdccb5cf 2/base
 --- a/2/base
 +++ b/2/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 % patch file contents
@@ -114,7 +114,7 @@
 diff -r b55ecdccb5cf 1/base
 --- a/1/base
 +++ b/1/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 % qrefresh in hg-root again
@@ -122,26 +122,26 @@
 diff -r b55ecdccb5cf 1/base
 --- a/1/base
 +++ b/1/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 diff -r b55ecdccb5cf 2/base
 --- a/2/base
 +++ b/2/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 % qdiff dirname
 diff -r b55ecdccb5cf 1/base
 --- a/1/base
 +++ b/1/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 diff -r b55ecdccb5cf 2/base
 --- a/2/base
 +++ b/2/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 % patch file contents
@@ -150,12 +150,12 @@
 diff -r b55ecdccb5cf 1/base
 --- a/1/base
 +++ b/1/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 diff -r b55ecdccb5cf 2/base
 --- a/2/base
 +++ b/2/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-mq-safety	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,44 @@
+#!/bin/sh
+
+echo '[extensions]' >> $HGRCPATH
+echo 'hgext.mq =' >> $HGRCPATH
+
+hg init repo
+cd repo
+
+echo foo > foo
+hg ci -qAm 'add a file'
+
+hg qinit
+
+hg qnew foo
+echo foo >> foo
+hg qrefresh -m 'append foo'
+
+hg qnew bar
+echo bar >> foo
+hg qrefresh -m 'append bar'
+
+echo '% try to commit on top of a patch'
+echo quux >> foo
+hg ci -m 'append quux'
+
+# cheat a bit...
+mv .hg/patches .hg/patches2
+hg ci -m 'append quux'
+mv .hg/patches2 .hg/patches
+
+echo '% qpop/qrefresh on the wrong revision'
+hg qpop
+hg qpop -n patches 2>&1 | sed -e 's/\(using patch queue:\).*/\1/'
+hg qrefresh
+
+hg up -C qtip
+echo '% qpop'
+hg qpop
+
+echo '% qrefresh'
+hg qrefresh
+
+echo '% tip:'
+hg tip --template '#rev# #desc#\n'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-mq-safety.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,14 @@
+% try to commit on top of a patch
+abort: cannot commit over an applied mq patch
+% qpop/qrefresh on the wrong revision
+abort: working directory revision is not qtip
+using patch queue:
+abort: popping would remove a revision not managed by this patch queue
+abort: working directory revision is not qtip
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% qpop
+abort: popping would remove a revision not managed by this patch queue
+% qrefresh
+abort: cannot refresh a revision with children
+% tip:
+3 append quux
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-mq-symlinks	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,23 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "mq=" >> $HGRCPATH
+
+hg init
+hg qinit
+hg qnew base.patch
+echo a > a
+echo b > b
+hg add a b
+hg qrefresh
+$TESTDIR/readlink.py a
+
+hg qnew symlink.patch
+rm a
+ln -s b a
+hg qrefresh --git
+$TESTDIR/readlink.py a
+
+hg qpop
+hg qpush
+$TESTDIR/readlink.py a
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-mq-symlinks.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,6 @@
+a -> a not a symlink
+a -> b
+Now at: base.patch
+applying symlink.patch
+Now at: symlink.patch
+a -> b
--- a/tests/test-mq.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-mq.out	Wed Feb 06 19:57:52 2008 -0800
@@ -59,6 +59,11 @@
 % qinit -c
 A .hgignore
 A series
+% qnew should refuse bad patch names
+abort: "series" cannot be used as the name of a patch
+abort: "status" cannot be used as the name of a patch
+abort: "guards" cannot be used as the name of a patch
+abort: ".hgignore" cannot be used as the name of a patch
 % qnew implies add
 A .hgignore
 A series
@@ -83,6 +88,14 @@
   series:
 A
 B
+% qnew with uncommitted changes
+abort: local changes found, refresh first
+A somefile
+% qnew with uncommitted changes and missing file (issue 803)
+someotherfile: No such file or directory
+A somefile
+issue803.patch
+Patch queue now empty
 % qnew -m
 foo bar
 % qrefresh
@@ -91,7 +104,7 @@
 diff -r  xa
 --- a/a
 +++ b/a
-@@ -1,1 +1,2 @@ a
+@@ -1,1 +1,2 @@
  a
 +a
 % empty qrefresh
@@ -102,7 +115,7 @@
 working dir diff:
 --- a/a
 +++ b/a
-@@ -1,1 +1,2 @@ a
+@@ -1,1 +1,2 @@
  a
 +a
 % qpop
@@ -241,7 +254,7 @@
 diff -r cb9a9f314b8b a
 --- a/a
 +++ b/a
-@@ -1,1 +1,2 @@ a
+@@ -1,1 +1,2 @@
  a
 +a
 diff -r cb9a9f314b8b b/f
@@ -273,6 +286,18 @@
 0 qparent
 1 qbase foo
 2 qtip bar tip
+% bad node in status
+Now at: foo
+changeset:   0:cb9a9f314b8b
+mq status file refers to unknown node
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     a
+
+mq status file refers to unknown node
+default                        0:cb9a9f314b8b
+abort: working directory revision is not qtip
 new file
 
 diff --git a/new b/new
@@ -302,7 +327,6 @@
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files
-(run 'hg update' to get a working copy)
 Patch queue now empty
 applying bar
 Now at: bar
@@ -336,7 +360,6 @@
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files
-(run 'hg update' to get a working copy)
 Patch queue now empty
 applying bar
 Now at: bar
@@ -360,6 +383,20 @@
 @@ -0,0 +1,1 @@
 +bar
 3 barney (foo)
+% refresh omitting an added file
+C newfile
+A newfile
+Now at: bar
+% create a git patch
+diff --git a/alexander b/alexander
+% create a git binary patch
+8ba2a2f3e77b55d03051ff9c24ad65e7  bucephalus
+diff --git a/bucephalus b/bucephalus
+% check binary patches can be popped and pushed
+Now at: addalexander
+applying addbucephalus
+Now at: addbucephalus
+8ba2a2f3e77b55d03051ff9c24ad65e7  bucephalus
 % strip again
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 merging foo
@@ -397,7 +434,6 @@
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files
-(run 'hg update' to get a working copy)
 changeset:   1:20cbbe65cff7
 tag:         tip
 user:        test
--- a/tests/test-nested-repo	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-nested-repo	Wed Feb 06 19:57:52 2008 -0800
@@ -4,16 +4,25 @@
 cd a
 hg init b
 echo x > b/x
+
 echo '# should print nothing'
+hg add b
 hg st
-echo '# should print ? b/x'
+
+echo '# should fail'
 hg st b/x
-
 hg add b/x
 
-echo '# should print A b/x'
+echo '# should fail'
+hg add b b/x
 hg st
-echo '# should forget b/x'
-hg revert --all
-echo '# should print nothing'
+
+echo '# should arguably print nothing'
 hg st b
+
+echo a > a
+hg ci -Ama a
+
+echo '# should fail'
+hg mv a b
+hg st
--- a/tests/test-nested-repo.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-nested-repo.out	Wed Feb 06 19:57:52 2008 -0800
@@ -1,8 +1,9 @@
 # should print nothing
-# should print ? b/x
-? b/x
-# should print A b/x
-A b/x
-# should forget b/x
-forgetting b/x
-# should print nothing
+# should fail
+abort: path 'b/x' is inside repo 'b'
+abort: path 'b/x' is inside repo 'b'
+# should fail
+abort: path 'b/x' is inside repo 'b'
+# should arguably print nothing
+# should fail
+abort: path 'b/a' is inside repo 'b'
--- a/tests/test-newbranch	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-newbranch	Wed Feb 06 19:57:52 2008 -0800
@@ -41,6 +41,15 @@
 hg log -qr foo
 cat .hg/branch.cache
 
+echo % push should update the branch cache
+hg init ../target
+echo % pushing just rev 0
+hg push -qr 0 ../target
+cat ../target/.hg/branch.cache
+echo % pushing everything
+hg push -qf ../target
+cat ../target/.hg/branch.cache
+
 echo % update with no arguments: tipmost revision of the current branch
 hg up -q -C 0
 hg up -q
--- a/tests/test-newbranch.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-newbranch.out	Wed Feb 06 19:57:52 2008 -0800
@@ -83,6 +83,15 @@
 bf1bc2f45e834c75404d0ddab57d53beab56e2f8 default
 4909a3732169c0c20011c4f4b8fdff4e3d89b23f foo
 67ec16bde7f1575d523313b9bca000f6a6f12dca bar
+% push should update the branch cache
+% pushing just rev 0
+be8523e69bf892e25817fc97187516b3c0804ae4 0
+be8523e69bf892e25817fc97187516b3c0804ae4 default
+% pushing everything
+4909a3732169c0c20011c4f4b8fdff4e3d89b23f 4
+bf1bc2f45e834c75404d0ddab57d53beab56e2f8 default
+4909a3732169c0c20011c4f4b8fdff4e3d89b23f foo
+67ec16bde7f1575d523313b9bca000f6a6f12dca bar
 % update with no arguments: tipmost revision of the current branch
 bf1bc2f45e83
 4909a3732169 (foo) tip
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-newcgi	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,91 @@
+#!/bin/sh
+# This tests if CGI files from after d0db3462d568 but
+# before d74fc8dec2b4 still work.
+
+hg init test
+
+cat >hgweb.cgi <<HGWEB
+#!/usr/bin/env python
+#
+# An example CGI script to use hgweb, edit as necessary
+
+import cgitb
+cgitb.enable()
+
+from mercurial import demandimport; demandimport.enable()
+from mercurial.hgweb import hgweb
+from mercurial.hgweb import wsgicgi
+from mercurial.hgweb.request import wsgiapplication
+
+def make_web_app():
+	return hgweb("test", "Empty test repository")
+
+wsgicgi.launch(wsgiapplication(make_web_app))
+HGWEB
+chmod 755 hgweb.cgi
+
+cat >hgweb.config <<HGWEBDIRCONF
+[paths]
+test = test
+HGWEBDIRCONF
+
+cat >hgwebdir.cgi <<HGWEBDIR
+#!/usr/bin/env python
+#
+# An example CGI script to export multiple hgweb repos, edit as necessary
+
+import cgitb
+cgitb.enable()
+
+from mercurial import demandimport; demandimport.enable()
+from mercurial.hgweb import hgwebdir
+from mercurial.hgweb import wsgicgi
+from mercurial.hgweb.request import wsgiapplication
+
+def make_web_app():
+	return hgwebdir("hgweb.config")
+
+wsgicgi.launch(wsgiapplication(make_web_app))
+HGWEBDIR
+chmod 755 hgwebdir.cgi
+
+DOCUMENT_ROOT="/var/www/hg"; export DOCUMENT_ROOT
+GATEWAY_INTERFACE="CGI/1.1"; export GATEWAY_INTERFACE
+HTTP_ACCEPT="text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"; export HTTP_ACCEPT
+HTTP_ACCEPT_CHARSET="ISO-8859-1,utf-8;q=0.7,*;q=0.7"; export HTTP_ACCEPT_CHARSET
+HTTP_ACCEPT_ENCODING="gzip,deflate"; export HTTP_ACCEPT_ENCODING
+HTTP_ACCEPT_LANGUAGE="en-us,en;q=0.5"; export HTTP_ACCEPT_LANGUAGE
+HTTP_CACHE_CONTROL="max-age=0"; export HTTP_CACHE_CONTROL
+HTTP_CONNECTION="keep-alive"; export HTTP_CONNECTION
+HTTP_HOST="hg.omnifarious.org"; export HTTP_HOST
+HTTP_KEEP_ALIVE="300"; export HTTP_KEEP_ALIVE
+HTTP_USER_AGENT="Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.0.4) Gecko/20060608 Ubuntu/dapper-security Firefox/1.5.0.4"; export HTTP_USER_AGENT
+PATH_INFO="/"; export PATH_INFO
+PATH_TRANSLATED="/var/www/hg/index.html"; export PATH_TRANSLATED
+QUERY_STRING=""; export QUERY_STRING
+REMOTE_ADDR="127.0.0.2"; export REMOTE_ADDR
+REMOTE_PORT="44703"; export REMOTE_PORT
+REQUEST_METHOD="GET"; export REQUEST_METHOD
+REQUEST_URI="/test/"; export REQUEST_URI
+SCRIPT_FILENAME="/home/hopper/hg_public/test.cgi"; export SCRIPT_FILENAME
+SCRIPT_NAME="/test"; export SCRIPT_NAME
+SCRIPT_URI="http://hg.omnifarious.org/test/"; export SCRIPT_URI
+SCRIPT_URL="/test/"; export SCRIPT_URL
+SERVER_ADDR="127.0.0.1"; export SERVER_ADDR
+SERVER_ADMIN="eric@localhost"; export SERVER_ADMIN
+SERVER_NAME="hg.omnifarious.org"; export SERVER_NAME
+SERVER_PORT="80"; export SERVER_PORT
+SERVER_PROTOCOL="HTTP/1.1"; export SERVER_PROTOCOL
+SERVER_SIGNATURE="<address>Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80</address>\; export SERVER_SIGNATURE
+"
+SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE
+python hgweb.cgi >page1 2>&1 ; echo $?
+python hgwebdir.cgi >page2 2>&1 ; echo $?
+PATH_INFO="/test/"
+PATH_TRANSLATED="/var/something/test.cgi"
+REQUEST_URI="/test/test/"
+SCRIPT_URI="http://hg.omnifarious.org/test/test/"
+SCRIPT_URL="/test/test/"
+python hgwebdir.cgi >page3 2>&1 ; echo $?
+fgrep -i error page1 page2 page3 && exit 1
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-newcgi.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,3 @@
+0
+0
+0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-newercgi	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,84 @@
+#!/bin/sh
+# This is a rudimentary test of the CGI files as of d74fc8dec2b4.
+
+hg init test
+
+cat >hgweb.cgi <<HGWEB
+#!/usr/bin/env python
+#
+# An example CGI script to use hgweb, edit as necessary
+
+import cgitb
+cgitb.enable()
+
+from mercurial import demandimport; demandimport.enable()
+from mercurial.hgweb import hgweb
+from mercurial.hgweb import wsgicgi
+
+application = hgweb("test", "Empty test repository")
+wsgicgi.launch(application)
+HGWEB
+chmod 755 hgweb.cgi
+
+cat >hgweb.config <<HGWEBDIRCONF
+[paths]
+test = test
+HGWEBDIRCONF
+
+cat >hgwebdir.cgi <<HGWEBDIR
+#!/usr/bin/env python
+#
+# An example CGI script to export multiple hgweb repos, edit as necessary
+
+import cgitb
+cgitb.enable()
+
+from mercurial import demandimport; demandimport.enable()
+from mercurial.hgweb import hgwebdir
+from mercurial.hgweb import wsgicgi
+
+application = hgwebdir("hgweb.config")
+wsgicgi.launch(application)
+HGWEBDIR
+chmod 755 hgwebdir.cgi
+
+DOCUMENT_ROOT="/var/www/hg"; export DOCUMENT_ROOT
+GATEWAY_INTERFACE="CGI/1.1"; export GATEWAY_INTERFACE
+HTTP_ACCEPT="text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"; export HTTP_ACCEPT
+HTTP_ACCEPT_CHARSET="ISO-8859-1,utf-8;q=0.7,*;q=0.7"; export HTTP_ACCEPT_CHARSET
+HTTP_ACCEPT_ENCODING="gzip,deflate"; export HTTP_ACCEPT_ENCODING
+HTTP_ACCEPT_LANGUAGE="en-us,en;q=0.5"; export HTTP_ACCEPT_LANGUAGE
+HTTP_CACHE_CONTROL="max-age=0"; export HTTP_CACHE_CONTROL
+HTTP_CONNECTION="keep-alive"; export HTTP_CONNECTION
+HTTP_HOST="hg.omnifarious.org"; export HTTP_HOST
+HTTP_KEEP_ALIVE="300"; export HTTP_KEEP_ALIVE
+HTTP_USER_AGENT="Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.0.4) Gecko/20060608 Ubuntu/dapper-security Firefox/1.5.0.4"; export HTTP_USER_AGENT
+PATH_INFO="/"; export PATH_INFO
+PATH_TRANSLATED="/var/www/hg/index.html"; export PATH_TRANSLATED
+QUERY_STRING=""; export QUERY_STRING
+REMOTE_ADDR="127.0.0.2"; export REMOTE_ADDR
+REMOTE_PORT="44703"; export REMOTE_PORT
+REQUEST_METHOD="GET"; export REQUEST_METHOD
+REQUEST_URI="/test/"; export REQUEST_URI
+SCRIPT_FILENAME="/home/hopper/hg_public/test.cgi"; export SCRIPT_FILENAME
+SCRIPT_NAME="/test"; export SCRIPT_NAME
+SCRIPT_URI="http://hg.omnifarious.org/test/"; export SCRIPT_URI
+SCRIPT_URL="/test/"; export SCRIPT_URL
+SERVER_ADDR="127.0.0.1"; export SERVER_ADDR
+SERVER_ADMIN="eric@localhost"; export SERVER_ADMIN
+SERVER_NAME="hg.omnifarious.org"; export SERVER_NAME
+SERVER_PORT="80"; export SERVER_PORT
+SERVER_PROTOCOL="HTTP/1.1"; export SERVER_PROTOCOL
+SERVER_SIGNATURE="<address>Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80</address>\; export SERVER_SIGNATURE
+"
+SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE
+python hgweb.cgi >page1 2>&1 ; echo $?
+python hgwebdir.cgi >page2 2>&1 ; echo $?
+PATH_INFO="/test/"
+PATH_TRANSLATED="/var/something/test.cgi"
+REQUEST_URI="/test/test/"
+SCRIPT_URI="http://hg.omnifarious.org/test/test/"
+SCRIPT_URL="/test/test/"
+python hgwebdir.cgi >page3 2>&1 ; echo $?
+fgrep -i error page1 page2 page3 && exit 1
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-newercgi.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,3 @@
+0
+0
+0
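
test-newcgi and test-newercgi pin two generations of the example CGI scripts: the older style wraps a factory in wsgiapplication() before handing it to wsgicgi.launch(), while the newer style passes the hgweb/hgwebdir object to wsgicgi.launch() directly. Conceptually, a wsgicgi-style launcher bridges the CGI process environment to the WSGI calling convention; a rough illustrative sketch of that bridging (not Mercurial's implementation) looks like this:

    import os
    import sys

    # Illustrative CGI-to-WSGI bridge: build the environ from the CGI
    # process environment, call the application once, and write a CGI
    # response (Status line, headers, body) to stdout.  PEP 333 era, so
    # body chunks are plain str.
    def launch(app):
        environ = dict(os.environ)
        environ.update({
            'wsgi.version': (1, 0), 'wsgi.url_scheme': 'http',
            'wsgi.input': sys.stdin, 'wsgi.errors': sys.stderr,
            'wsgi.multithread': False, 'wsgi.multiprocess': True,
            'wsgi.run_once': True,
        })
        def start_response(status, headers):
            sys.stdout.write('Status: %s\r\n' % status)
            for name, value in headers:
                sys.stdout.write('%s: %s\r\n' % (name, value))
            sys.stdout.write('\r\n')
            return sys.stdout.write
        for chunk in app(environ, start_response):
            sys.stdout.write(chunk)
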
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-no-symlinks	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,47 @@
+#!/bin/sh
+
+"$TESTDIR/hghave" no-symlink || exit 80
+
+# The following script was used to create the bundle:
+#
+# hg init symlinks
+# cd symlinks
+# echo a > a
+# mkdir d
+# echo b > d/b
+# ln -s a a.lnk
+# ln -s d/b d/b.lnk
+# hg ci -Am t
+# hg bundle --base null ../test-no-symlinks.hg
+
+# Extract a symlink on a platform not supporting them
+echo % unbundle
+hg init t
+cd t
+hg pull -q "$TESTDIR/test-no-symlinks.hg"
+hg update
+
+cat a.lnk && echo
+cat d/b.lnk && echo
+
+# Copy a symlink and move another
+echo % move and copy
+hg copy a.lnk d/a2.lnk
+hg mv d/b.lnk b2.lnk
+hg ci -Am copy
+cat d/a2.lnk && echo
+cat b2.lnk && echo
+
+# Bundle and extract again
+echo % bundle
+hg bundle --base null ../symlinks.hg
+cd ..
+
+hg init t2
+cd t2
+hg pull ../symlinks.hg
+hg update
+
+cat a.lnk && echo
+cat d/a2.lnk && echo
+cat b2.lnk && echo
Binary file tests/test-no-symlinks.hg has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-no-symlinks.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,20 @@
+% unbundle
+4 files updated, 0 files merged, 0 files removed, 0 files unresolved
+a
+d/b
+% move and copy
+a
+d/b
+% bundle
+2 changesets found
+pulling from ../symlinks.hg
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 6 changes to 6 files
+(run 'hg update' to get a working copy)
+5 files updated, 0 files merged, 0 files removed, 0 files unresolved
+a
+a
+d/b
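
test-no-symlinks checks a bundle containing symlinks out on a platform without symlink support; the expected output shows that cat a.lnk prints "a" and cat d/b.lnk prints "d/b", i.e. each link is materialized as a regular file whose text is the link target. A sketch of that fallback idea (illustrative only, not Mercurial's util code):

    import os

    # Without OS-level symlink support, fall back to writing the link
    # target into an ordinary file, which is what the expected output of
    # test-no-symlinks relies on.
    def write_link(path, target):
        if getattr(os, 'symlink', None) is not None:
            os.symlink(target, path)
        else:
            with open(path, 'w') as f:
                f.write(target)
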
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-non-interactive-wsgi	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,71 @@
+#!/bin/sh
+# Tests if hgweb can run without touching sys.stdin, as is required
+# by the WSGI standard and strictly implemented by mod_wsgi.
+
+mkdir repo
+cd repo
+hg init
+echo foo > bar
+hg add bar
+hg commit -m "test" -d "0 0"
+hg tip
+
+cat > request.py <<EOF
+from mercurial import dispatch
+from mercurial.hgweb.hgweb_mod import hgweb
+from mercurial.ui import ui
+from mercurial import hg
+from StringIO import StringIO
+import os, sys
+
+class FileLike(object):
+    def __init__(self, real):
+        self.real = real
+    def fileno(self):
+        print >> sys.__stdout__, 'FILENO'
+        return self.real.fileno()
+    def read(self):
+        print >> sys.__stdout__, 'READ'
+        return self.real.read()
+    def readline(self):
+        print >> sys.__stdout__, 'READLINE'
+        return self.real.readline()
+    def isatty(self):
+        print >> sys.__stdout__, 'ISATTY'
+        return False
+
+sys.stdin = FileLike(sys.stdin)
+errors = StringIO()
+input = StringIO()
+output = StringIO()
+
+def startrsp(headers, data):
+	print '---- HEADERS'
+	print headers
+	print '---- DATA'
+	print data
+	return output.write
+
+env = {
+	'wsgi.version': (1, 0),
+	'wsgi.url_scheme': 'http',
+	'wsgi.errors': errors,
+	'wsgi.input': input,
+	'wsgi.multithread': False,
+	'wsgi.multiprocess': False,
+	'wsgi.run_once': False,
+	'REQUEST_METHOD': 'GET',
+	'SCRIPT_NAME': '',
+	'PATH_INFO': '',
+	'QUERY_STRING': '',
+	'SERVER_NAME': '127.0.0.1',
+	'SERVER_PORT': os.environ['HGPORT'],
+	'SERVER_PROTOCOL': 'HTTP/1.0'
+}
+
+hgweb('.')(env, startrsp)
+print '---- ERRORS'
+print errors.getvalue()
+EOF
+
+python request.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-non-interactive-wsgi.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,12 @@
+changeset:   0:61c9426e69fe
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     test
+
+---- HEADERS
+200 Script output follows
+---- DATA
+[('Content-Type', 'text/html; charset=ascii')]
+---- ERRORS
+
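
test-non-interactive-wsgi calls the hgweb object directly with a hand-built environ and a stub start_response, and wraps sys.stdin so any attempt to read it is reported; per the WSGI spec the application must take its request body from environ['wsgi.input'] instead. A minimal sketch of that calling convention, with an illustrative application standing in for hgweb:

    from io import BytesIO

    # A WSGI application is a callable taking (environ, start_response)
    # and returning an iterable of body chunks; request data comes from
    # environ['wsgi.input'], never from sys.stdin.
    def app(environ, start_response):
        start_response('200 Script output follows',
                       [('Content-Type', 'text/plain')])
        return ['path=%s\n' % environ.get('PATH_INFO', '')]

    environ = {
        'REQUEST_METHOD': 'GET', 'PATH_INFO': '/', 'QUERY_STRING': '',
        'SERVER_NAME': '127.0.0.1', 'SERVER_PORT': '80',
        'SERVER_PROTOCOL': 'HTTP/1.0',
        'wsgi.version': (1, 0), 'wsgi.url_scheme': 'http',
        'wsgi.input': BytesIO(), 'wsgi.errors': BytesIO(),
        'wsgi.multithread': False, 'wsgi.multiprocess': False,
        'wsgi.run_once': False,
    }

    def start_response(status, headers):
        print('---- HEADERS')
        print(status)
        print('---- DATA')
        print(headers)

    for chunk in app(environ, start_response):
        print(chunk)
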
--- a/tests/test-notfound	Thu Jul 26 07:56:27 2007 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,9 +0,0 @@
-#!/bin/sh
-
-hg init
-
-echo "Is there an error message when trying to diff non-existing files?"
-hg diff not found
-
-echo "Is there an error message when trying to add non-existing files?"
-hg add not found
--- a/tests/test-notfound.out	Thu Jul 26 07:56:27 2007 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,6 +0,0 @@
-Is there an error message when trying to diff non-existing files?
-found: No such file or directory
-not: No such file or directory
-Is there an error message when trying to add non-existing files?
-found: No such file or directory
-not: No such file or directory
--- a/tests/test-notify.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-notify.out	Wed Feb 06 19:57:52 2008 -0800
@@ -30,7 +30,7 @@
 diff -r cb9a9f314b8b -r 0647d048b600 a
 --- a/a	Thu Jan 01 00:00:00 1970 +0000
 +++ b/a	Thu Jan 01 00:00:01 1970 +0000
-@@ -1,1 +1,2 @@ a
+@@ -1,1 +1,2 @@
  a
 +a
 (run 'hg update' to get a working copy)
@@ -60,7 +60,7 @@
 diff -r cb9a9f314b8b -r 0647d048b600 a
 --- a/a	Thu Jan 01 00:00:00 1970 +0000
 +++ b/a	Thu Jan 01 00:00:01 1970 +0000
-@@ -1,1 +1,2 @@ a
+@@ -1,1 +1,2 @@
  a
 +a
 (run 'hg update' to get a working copy)
--- a/tests/test-oldcgi	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-oldcgi	Wed Feb 06 19:57:52 2008 -0800
@@ -1,4 +1,5 @@
 #!/bin/sh
+# This tests if CGI files from before d0db3462d568 still work.
 
 hg init test
 
@@ -88,13 +89,13 @@
 SERVER_SIGNATURE="<address>Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80</address>\; export SERVER_SIGNATURE
 "
 SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE
-./hgweb.cgi >page1 2>&1 ; echo $?
-./hgwebdir.cgi >page2 2>&1 ; echo $?
+python hgweb.cgi >page1 2>&1 ; echo $?
+python hgwebdir.cgi >page2 2>&1 ; echo $?
 PATH_INFO="/test/"
 PATH_TRANSLATED="/var/something/test.cgi"
 REQUEST_URI="/test/test/"
 SCRIPT_URI="http://hg.omnifarious.org/test/test/"
 SCRIPT_URL="/test/test/"
-./hgwebdir.cgi >page3 2>&1 ; echo $?
+python hgwebdir.cgi >page3 2>&1 ; echo $?
 fgrep -i error page1 page2 page3 && exit 1
 exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-parentrevspec	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,69 @@
+#!/bin/sh
+
+commit()
+{
+    msg=$1
+    p1=$2
+    p2=$3
+
+    if [ "$p1" ]; then
+	hg up -qC $p1
+    fi
+
+    if [ "$p2" ]; then
+	HGMERGE=true hg merge -q $p2
+    fi
+
+    echo >> foo
+
+    hg commit -d '0 0' -qAm "$msg" foo
+}
+
+hg init repo
+cd repo
+
+echo '[extensions]' > .hg/hgrc
+echo 'hgext.parentrevspec =' >> .hg/hgrc
+
+commit '0: add foo'
+commit '1: change foo 1'
+commit '2: change foo 2a'
+commit '3: change foo 3a'
+commit '4: change foo 2b' 1
+commit '5: merge' 3 4
+commit '6: change foo again'
+
+hg log --template '#rev#:#node|short# #parents#\n'
+echo
+
+lookup()
+{
+    for rev in "$@"; do
+	printf "$rev: "
+	hg id -nr $rev
+    done
+    true
+}
+
+tipnode=`hg id -ir tip`
+
+echo 'should work with tag/branch/node/rev'
+for r in tip default $tipnode 6; do
+    lookup "$r^"
+done
+echo
+
+echo 'some random lookups'
+lookup "6^^" "6^^^" "6^^^^" "6^^^^^" "6^^^^^^" "6^1" "6^2" "6^^2" "6^1^2" "6^^3"
+lookup "6~" "6~1" "6~2" "6~3" "6~4" "6~5" "6~42" "6~1^2" "6~1^2~2"
+echo
+
+echo 'with a tag "6^" pointing to rev 1'
+hg tag -l -r 1 "6^"
+lookup "6^" "6^1" "6~1" "6^^"
+echo
+
+echo 'with a tag "foo^bar" pointing to rev 2'
+hg tag -l -r 2 "foo^bar"
+lookup "foo^bar" "foo^bar^"
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-parentrevspec.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,44 @@
+6:755d1e0d79e9 
+5:9ce2ce29723a 3:a3e00c7dbf11 4:bb4475edb621 
+4:bb4475edb621 1:5d953a1917d1 
+3:a3e00c7dbf11 
+2:befc7d89d081 
+1:5d953a1917d1 
+0:837088b6e1d9 
+
+should work with tag/branch/node/rev
+tip^: 5
+default^: 5
+755d1e0d79e9^: 5
+6^: 5
+
+some random lookups
+6^^: 3
+6^^^: 2
+6^^^^: 1
+6^^^^^: 0
+6^^^^^^: -1
+6^1: 5
+6^2: abort: unknown revision '6^2'!
+6^^2: 4
+6^1^2: 4
+6^^3: abort: unknown revision '6^^3'!
+6~: abort: unknown revision '6~'!
+6~1: 5
+6~2: 3
+6~3: 2
+6~4: 1
+6~5: 0
+6~42: -1
+6~1^2: 4
+6~1^2~2: 0
+
+with a tag "6^" pointing to rev 1
+6^: 1
+6^1: 5
+6~1: 5
+6^^: 3
+
+with a tag "foo^bar" pointing to rev 2
+foo^bar: 2
+foo^bar^: abort: unknown revision 'foo^bar^'!
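
The new parentrevspec test pins the suffix syntax: rev^ (or rev^1) is the first parent, rev^2 the second, rev~N follows first parents N times, -1 stands for the null revision once the walk passes the root, and an exact tag match such as "6^" wins over suffix parsing. The expected output above fully determines those semantics; the following is only an illustrative restatement of them in Python, using the parent graph from the test, and is not the extension's parser:

    # Parent map from the test's log output: rev -> (first parent, second
    # parent), with -1 as the null revision.
    parents = {6: (5, -1), 5: (3, 4), 4: (1, -1), 3: (2, -1),
               2: (1, -1), 1: (0, -1), 0: (-1, -1)}

    def resolve(spec):
        i = 0
        while i < len(spec) and spec[i].isdigit():
            i += 1
        rev, rest = int(spec[:i]), spec[i:]
        while rest:
            op, rest = rest[0], rest[1:]
            j = 0
            while j < len(rest) and rest[j].isdigit():
                j += 1
            n, rest = (int(rest[:j]) if j else None), rest[j:]
            if op == '^':
                n = 1 if n is None else n          # bare ^ means first parent
                if n not in (1, 2):
                    raise ValueError("unknown revision '%s'!" % spec)
                rev = parents.get(rev, (-1, -1))[n - 1]
                if n == 2 and rev == -1:           # no second parent
                    raise ValueError("unknown revision '%s'!" % spec)
            else:                                  # '~' needs an explicit count
                if n is None:
                    raise ValueError("unknown revision '%s'!" % spec)
                for _ in range(n):
                    if rev == -1:
                        break
                    rev = parents[rev][0]
        return rev

    assert resolve('6^^') == 3 and resolve('6~2') == 3
    assert resolve('6^^2') == 4 and resolve('6~1^2~2') == 0
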
--- a/tests/test-parents	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-parents	Wed Feb 06 19:57:52 2008 -0800
@@ -13,6 +13,12 @@
 hg ci -Ama -d '1 0'
 echo b >> b
 hg ci -Amb -d '2 0'
+echo c > c
+hg ci -Amc -d '3 0'
+hg up -C 1
+echo d > c
+hg ci -Amc2 -d '4 0'
+hg up -C 3
 
 echo % hg parents
 hg parents
@@ -20,6 +26,12 @@
 echo % hg parents a
 hg parents a
 
+echo % hg parents c, single revision
+hg parents c
+
+echo % hg parents -r 3 c
+hg parents -r 3 c
+
 echo % hg parents -r 2
 hg parents -r 2
 
@@ -41,4 +53,15 @@
 cd ..
 hg parents -r 2 glob:a
 
+echo % merge working dir with 2 parents, hg parents c
+HGMERGE=true hg merge
+hg parents c
+
+echo % merge working dir with 1 parent, hg parents
+hg up -C 2
+HGMERGE=true hg merge -r 4
+hg parents
+echo % merge working dir with 1 parent, hg parents c
+hg parents c
+
 true
--- a/tests/test-parents.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-parents.out	Wed Feb 06 19:57:52 2008 -0800
@@ -1,19 +1,30 @@
 % no working directory
 adding a
 adding b
+adding c
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+adding c
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
 % hg parents
-changeset:   2:6cfac479f009
-tag:         tip
+changeset:   3:02d851b7e549
 user:        test
-date:        Thu Jan 01 00:00:02 1970 +0000
-summary:     b
+date:        Thu Jan 01 00:00:03 1970 +0000
+summary:     c
 
 % hg parents a
-changeset:   0:b6a1406d8886
+changeset:   1:d786049f033a
 user:        test
-date:        Thu Jan 01 00:00:00 1970 +0000
-summary:     ab
+date:        Thu Jan 01 00:00:01 1970 +0000
+summary:     a
 
+% hg parents c, single revision
+changeset:   3:02d851b7e549
+user:        test
+date:        Thu Jan 01 00:00:03 1970 +0000
+summary:     c
+
+% hg parents -r 3 c
+abort: 'c' not found in manifest!
 % hg parents -r 2
 changeset:   1:d786049f033a
 user:        test
@@ -21,24 +32,64 @@
 summary:     a
 
 % hg parents -r 2 a
-changeset:   0:b6a1406d8886
+changeset:   1:d786049f033a
 user:        test
-date:        Thu Jan 01 00:00:00 1970 +0000
-summary:     ab
+date:        Thu Jan 01 00:00:01 1970 +0000
+summary:     a
 
 % hg parents -r 2 ../a
 abort: ../a not under root
 % cd dir; hg parents -r 2 ../a
-changeset:   0:b6a1406d8886
+changeset:   1:d786049f033a
 user:        test
-date:        Thu Jan 01 00:00:00 1970 +0000
-summary:     ab
+date:        Thu Jan 01 00:00:01 1970 +0000
+summary:     a
 
 % hg parents -r 2 path:a
-changeset:   0:b6a1406d8886
+changeset:   1:d786049f033a
 user:        test
-date:        Thu Jan 01 00:00:00 1970 +0000
-summary:     ab
+date:        Thu Jan 01 00:00:01 1970 +0000
+summary:     a
 
 % hg parents -r 2 glob:a
 abort: can only specify an explicit file name
+% merge working dir with 2 parents, hg parents c
+merging c
+0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+changeset:   3:02d851b7e549
+user:        test
+date:        Thu Jan 01 00:00:03 1970 +0000
+summary:     c
+
+changeset:   4:48cee28d4b4e
+tag:         tip
+parent:      1:d786049f033a
+user:        test
+date:        Thu Jan 01 00:00:04 1970 +0000
+summary:     c2
+
+% merge working dir with 1 parent, hg parents
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+changeset:   2:6cfac479f009
+user:        test
+date:        Thu Jan 01 00:00:02 1970 +0000
+summary:     b
+
+changeset:   4:48cee28d4b4e
+tag:         tip
+parent:      1:d786049f033a
+user:        test
+date:        Thu Jan 01 00:00:04 1970 +0000
+summary:     c2
+
+% merge working dir with 1 parent, hg parents c
+changeset:   4:48cee28d4b4e
+tag:         tip
+parent:      1:d786049f033a
+user:        test
+date:        Thu Jan 01 00:00:04 1970 +0000
+summary:     c2
+
--- a/tests/test-patchbomb	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-patchbomb	Wed Feb 06 19:57:52 2008 -0800
@@ -1,20 +1,46 @@
 #!/bin/sh
 
+fixheaders()
+{
+    sed -e 's/\(Message-Id:.*@\).*/\1/'  \
+        -e 's/\(In-Reply-To:.*@\).*/\1/' \
+        -e 's/===.*/===/'
+}
+
 echo "[extensions]" >> $HGRCPATH
 echo "patchbomb=" >> $HGRCPATH
 
-hg init
+hg init t
+cd t
 echo a > a
 hg commit -Ama -d '1 0'
 
 hg email --date '1970-1-1 0:1' -n -f quux -t foo -c bar tip | \
-  sed -e 's/\(Message-Id:.*@\).*/\1/'
+  fixheaders
 
 echo b > b
 hg commit -Amb -d '2 0'
 
 hg email --date '1970-1-1 0:2' -n -f quux -t foo -c bar -s test 0:tip | \
-  sed -e 's/\(Message-Id:.*@\).*/\1/' | \
-  sed -e 's/\(In-Reply-To:.*@\).*/\1/'
+  fixheaders
 
 hg email -m test.mbox -f quux -t foo -c bar -s test 0:tip
+
+cd ..
+
+hg clone -q t t2
+cd t2
+echo c > c
+hg commit -Amc -d '3 0'
+
+cat > description <<EOF
+a multiline
+
+description
+EOF
+
+echo % test bundle and description
+hg email --date '1970-1-1 0:3' -n -f quux -t foo \
+    -c bar -s test -r tip -b --desc description | \
+    fixheaders
+
--- a/tests/test-patchbomb.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-patchbomb.out	Wed Feb 06 19:57:52 2008 -0800
@@ -107,3 +107,41 @@
 Writing [PATCH 0 of 2] test ...
 Writing [PATCH 1 of 2] a ...
 Writing [PATCH 2 of 2] b ...
+adding c
+% test bundle and description
+searching for changes
+1 changesets found
+
+Displaying test ...
+Content-Type: multipart/mixed; boundary="===
+MIME-Version: 1.0
+Subject: test
+Message-Id: <patchbomb.180@
+Date: Thu, 01 Jan 1970 00:03:00 +0000
+From: quux
+To: foo
+Cc: bar
+
+--===
+Content-Type: text/plain; charset="us-ascii"
+MIME-Version: 1.0
+Content-Transfer-Encoding: 7bit
+
+a multiline
+
+description
+
+--===
+Content-Type: application/x-mercurial-bundle
+MIME-Version: 1.0
+Content-Disposition: attachment; filename="bundle.hg"
+Content-Transfer-Encoding: base64
+
+SEcxMEJaaDkxQVkmU1nvR7I3AAAN////lFYQWj1/4HwRkdC/AywIAk0E4pfoSIIIgQCgGEQOcLAA
+2tA1VPyp4mkeoG0EaaPU0GTT1GjRiNPIg9CZGBqZ6UbU9J+KFU09DNUaGgAAAAAANAGgAAAAA1U8
+oGgAADQGgAANNANAAAAAAZipFLz3XoakCEQB3PVPyHJVi1iYkAAKQAZQGpQGZESInRnCFMqLDla2
+Bx3qfRQeA2N4lnzKkAmP8kR2asievLLXXebVU8Vg4iEBqcJNJAxIapSU6SM4888ZAciRG6MYAIEE
+SlIBpFisgGkyRjX//TMtfcUAEsGu56+YnE1OlTZmzKm8BSu2rvo4rHAYYaadIFFuTy0LYgIkgLVD
+sgVa2F19D1tx9+hgbAygLgQwaIqcDdgA4BjQgIiz/AEP72++llgDKhKducqodGE4B0ETqF3JFOFC
+Q70eyNw=
+--===
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-paths	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,10 @@
+#!/bin/sh
+hg init a
+hg clone a b
+cd a
+echo '[paths]' >> .hg/hgrc
+echo 'dupe = ../b' >> .hg/hgrc
+hg in dupe | fgrep '../'
+cd ..
+hg -R a in dupe | fgrep '../'
+true
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-paths.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,1 @@
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-permissions	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-permissions	Wed Feb 06 19:57:52 2008 -0800
@@ -1,6 +1,7 @@
 #!/bin/sh
 
-hg init
+hg init t
+cd t
 echo foo > a
 hg add a
 hg commit -m "1" -d "1000000 0"
@@ -12,4 +13,6 @@
 chmod -w .hg/store/data/a.i
 echo barber > a
 hg commit -m "2" -d "1000000 0" 2>/dev/null || echo commit failed
-
+chmod -w .
+hg diff --nodates
+chmod +w .
--- a/tests/test-permissions.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-permissions.out	Wed Feb 06 19:57:52 2008 -0800
@@ -14,3 +14,9 @@
 checking files
 1 files, 1 changesets, 1 total revisions
 commit failed
+diff -r c1fab96507ef a
+--- a/a
++++ b/a
+@@ -1,1 +1,1 @@
+-foo
++barber
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-profile	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+echo % test --time
+hg --time help -q help 2>&1 | grep Time > /dev/null || echo --time failed
+
+echo % test --profile
+if "$TESTDIR/hghave" -q hotshot; then
+    # hotshot might be missing for licensing issues
+    hg --profile help -q help 2>&1 | grep ncalls > /dev/null || echo --profile failed
+fi
+
+echo % test --lsprof
+if "$TESTDIR/hghave" -q lsprof; then
+    hg --lsprof help -q help 2>&1 | grep CallCount > /dev/null || echo --lsprof failed
+fi
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-profile.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,3 @@
+% test --time
+% test --profile
+% test --lsprof
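
test-profile only checks that the profiler table markers appear: "Time" for --time, "ncalls" for the hotshot-backed --profile, and "CallCount" for --lsprof, skipping quietly when hotshot or lsprof is unavailable. For illustration only, the modern stdlib cProfile (not what the test invokes) produces the same kind of "ncalls" table:

    import cProfile
    import io
    import pstats

    # Profile a trivial workload and confirm the summary table carries the
    # "ncalls" column header, the same marker the test greps for.
    pr = cProfile.Profile()
    pr.enable()
    sum(i * i for i in range(1000))
    pr.disable()

    out = io.StringIO()
    pstats.Stats(pr, stream=out).sort_stats('cumulative').print_stats(5)
    print('ncalls' in out.getvalue())
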
--- a/tests/test-pull	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-pull	Wed Feb 06 19:57:52 2008 -0800
@@ -7,17 +7,17 @@
 hg addremove
 hg commit -m 1
 hg verify
-hg serve -p 20059 -d --pid-file=hg.pid
+hg serve -p $HGPORT -d --pid-file=hg.pid
 cat hg.pid >> $DAEMON_PIDS
 cd ..
 
-http_proxy= hg clone --pull http://localhost:20059/ copy
+http_proxy= hg clone --pull http://localhost:$HGPORT/ copy | sed -e 's,:[0-9][0-9]*/,/,'
 cd copy
 hg verify
 hg co
 cat foo
 hg manifest --debug
-hg pull
+hg pull | sed -e 's,:[0-9][0-9]*/,/,'
 
 echo % issue 622
 cd ..
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-pull-r	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+hg init repo
+cd repo
+echo foo > foo
+hg ci -qAm 'add foo' -d '0 0'
+echo >> foo
+hg ci -m 'change foo' -d '0 0'
+hg up -qC 0
+echo bar > bar
+hg ci -qAm 'add bar' -d '0 0'
+hg log
+cd ..
+hg init copy
+cd copy
+
+echo '% pull -r 0'
+hg pull -qr 0 ../repo
+hg log
+
+echo '% pull -r 1'
+hg pull -qr 1 ../repo
+hg log
+
+# this used to abort: received changelog group is empty
+echo '% pull -r 1 again'
+hg pull -qr 1 ../repo
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-pull-r.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,37 @@
+changeset:   2:effea6de0384
+tag:         tip
+parent:      0:bbd179dfa0a7
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     add bar
+
+changeset:   1:ed1b79f46b9a
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     change foo
+
+changeset:   0:bbd179dfa0a7
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     add foo
+
+% pull -r 0
+changeset:   0:bbd179dfa0a7
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     add foo
+
+% pull -r 1
+changeset:   1:ed1b79f46b9a
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     change foo
+
+changeset:   0:bbd179dfa0a7
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     add foo
+
+% pull -r 1 again
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-pull-update	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,29 @@
+#!/bin/sh
+#
+
+hg init t
+cd t
+echo 1 > foo
+hg ci -Am m
+
+cd ..
+hg clone t tt
+cd tt
+echo 1.1 > foo
+hg ci -Am m
+
+cd ../t
+echo 1.2 > foo
+hg ci -Am m
+echo % should fail
+hg pull -u ../tt
+
+cd ../tt
+echo % should fail
+hg pull -u ../t
+HGMERGE=true hg merge
+hg ci -mm
+
+cd ../t
+echo % should work
+hg pull -u ../tt
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-pull-update.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,31 @@
+adding foo
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% should fail
+pulling from ../tt
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files (+1 heads)
+not updating, since new heads added
+(run 'hg heads' to see heads, 'hg merge' to merge)
+% should fail
+pulling from ../t
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files (+1 heads)
+not updating, since new heads added
+(run 'hg heads' to see heads, 'hg merge' to merge)
+merging foo
+0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+% should work
+pulling from ../tt
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files (-1 heads)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-pull.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-pull.out	Wed Feb 06 19:57:52 2008 -0800
@@ -17,8 +17,8 @@
 1 files, 1 changesets, 1 total revisions
 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 foo
-2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 foo
-pulling from http://localhost:20059/
+2ed2a3912a0b24502043eae84ee4b279c18b90dd 644   foo
+pulling from http://localhost/
 searching for changes
 no changes found
 % issue 622
--- a/tests/test-purge	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-purge	Wed Feb 06 19:57:52 2008 -0800
@@ -101,6 +101,13 @@
 hg revert --all --quiet
 ls
 
+echo '% tracked file in ignored directory (issue621)'
+echo directory >> .hgignore
+hg ci -m 'ignore directory'
+touch untracked_file
+hg purge -p
+hg purge -v
+
 echo % skip excluded files
 touch excluded_file
 hg purge -p -X excluded_file
--- a/tests/test-purge.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-purge.out	Wed Feb 06 19:57:52 2008 -0800
@@ -59,6 +59,9 @@
 Removing file untracked_file
 directory
 r1
+% tracked file in ignored directory (issue621)
+untracked_file
+Removing file untracked_file
 % skip excluded files
 directory
 excluded_file
--- a/tests/test-push-http	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-push-http	Wed Feb 06 19:57:52 2008 -0800
@@ -16,33 +16,33 @@
 cd ../test
 
 echo % expect ssl error
-hg serve -p 20059 -d --pid-file=hg.pid
+hg serve -p $HGPORT -d --pid-file=hg.pid
 cat hg.pid >> $DAEMON_PIDS
-hg --cwd ../test2 push http://localhost:20059/
+hg --cwd ../test2 push http://localhost:$HGPORT/ | sed -e 's,:[0-9][0-9]*/,/,'
 kill `cat hg.pid`
 
 echo % expect authorization error
 echo '[web]' > .hg/hgrc
 echo 'push_ssl = false' >> .hg/hgrc
-hg serve -p 20059 -d --pid-file=hg.pid
+hg serve -p $HGPORT -d --pid-file=hg.pid
 cat hg.pid >> $DAEMON_PIDS
-hg --cwd ../test2 push http://localhost:20059/
+hg --cwd ../test2 push http://localhost:$HGPORT/ | sed -e 's,:[0-9][0-9]*/,/,'
 kill `cat hg.pid`
 
 echo % expect authorization error: must have authorized user
 echo 'allow_push = unperson' >> .hg/hgrc
-hg serve -p 20059 -d --pid-file=hg.pid
+hg serve -p $HGPORT -d --pid-file=hg.pid
 cat hg.pid >> $DAEMON_PIDS
-hg --cwd ../test2 push http://localhost:20059/
+hg --cwd ../test2 push http://localhost:$HGPORT/ | sed -e 's,:[0-9][0-9]*/,/,'
 kill `cat hg.pid`
 
 echo % expect success
 echo 'allow_push = *' >> .hg/hgrc
 echo '[hooks]' >> .hg/hgrc
 echo 'changegroup = python ../printenv.py changegroup 0 ../urls' >> .hg/hgrc
-hg serve -p 20059 -d --pid-file=hg.pid
+hg serve -p $HGPORT -d --pid-file=hg.pid
 cat hg.pid >> $DAEMON_PIDS
-hg --cwd ../test2 push http://localhost:20059/
+hg --cwd ../test2 push http://localhost:$HGPORT/ | sed -e 's,:[0-9][0-9]*/,/,'
 kill `cat hg.pid`
 hg rollback
 
@@ -52,14 +52,14 @@
 echo '[web]' > .hg/hgrc
 echo 'push_ssl = false' >> .hg/hgrc
 echo 'deny_push = *' >> .hg/hgrc
-hg serve -p 20059 -d --pid-file=hg.pid
+hg serve -p $HGPORT -d --pid-file=hg.pid
 cat hg.pid >> $DAEMON_PIDS
-hg --cwd ../test2 push http://localhost:20059/
+hg --cwd ../test2 push http://localhost:$HGPORT/ | sed -e 's,:[0-9][0-9]*/,/,'
 kill `cat hg.pid`
 
 echo % expect authorization error: some users denied, users must be authenticated
 echo 'deny_push = unperson' >> .hg/hgrc
-hg serve -p 20059 -d --pid-file=hg.pid
+hg serve -p $HGPORT -d --pid-file=hg.pid
 cat hg.pid >> $DAEMON_PIDS
-hg --cwd ../test2 push http://localhost:20059/
+hg --cwd ../test2 push http://localhost:$HGPORT/ | sed -e 's,:[0-9][0-9]*/,/,'
 kill `cat hg.pid`
--- a/tests/test-push-http.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-push-http.out	Wed Feb 06 19:57:52 2008 -0800
@@ -1,19 +1,19 @@
 adding a
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 % expect ssl error
-pushing to http://localhost:20059/
+pushing to http://localhost/
 searching for changes
 ssl required
 % expect authorization error
-pushing to http://localhost:20059/
+pushing to http://localhost/
 searching for changes
 push not authorized
 % expect authorization error: must have authorized user
-pushing to http://localhost:20059/
+pushing to http://localhost/
 searching for changes
 push not authorized
 % expect success
-pushing to http://localhost:20059/
+pushing to http://localhost/
 searching for changes
 adding changesets
 adding manifests
@@ -22,10 +22,10 @@
 rolling back last transaction
 changegroup hook: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_URL=remote:http 
 % expect authorization error: all users denied
-pushing to http://localhost:20059/
+pushing to http://localhost/
 searching for changes
 push not authorized
 % expect authorization error: some users denied, users must be authenticated
-pushing to http://localhost:20059/
+pushing to http://localhost/
 searching for changes
 push not authorized
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-qrecord	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,100 @@
+#!/bin/sh
+
+echo "[ui]" >> $HGRCPATH
+echo "interactive=true" >> $HGRCPATH
+echo "[extensions]"     >> $HGRCPATH
+echo "record="          >> $HGRCPATH
+
+echo "% help (no mq, so no qrecord)"
+
+hg help qrecord
+
+echo "mq="              >> $HGRCPATH
+
+echo "% help (mq present)"
+
+hg help qrecord
+
+hg init a
+cd a
+
+echo % base commit
+
+cat > 1.txt <<EOF
+1
+2
+3
+4
+5
+EOF
+cat > 2.txt <<EOF
+a
+b
+c
+d
+e
+f
+EOF
+mkdir dir
+cat > dir/a.txt <<EOF
+hello world
+
+someone
+up
+there
+loves
+me
+EOF
+
+hg add 1.txt 2.txt dir/a.txt
+hg commit -d '0 0' -m 'initial checkin'
+
+echo % changing files 
+
+sed -e 's/2/2 2/;s/4/4 4/' 1.txt > 1.txt.new
+sed -e 's/b/b b/' 2.txt > 2.txt.new
+sed -e 's/hello world/hello world!/' dir/a.txt > dir/a.txt.new
+
+mv -f 1.txt.new 1.txt
+mv -f 2.txt.new 2.txt
+mv -f dir/a.txt.new dir/a.txt
+
+echo % whole diff
+
+hg diff --nodates
+
+echo % qrecord a.patch
+
+hg qrecord -d '0 0' -m aaa a.patch <<EOF
+y
+y
+n
+y
+y
+n
+EOF
+
+echo
+echo % "after qrecord a.patch 'tip'"
+hg tip -p
+echo
+echo % "after qrecord a.patch 'diff'"
+hg diff --nodates
+
+echo % qrecord b.patch
+hg qrecord -d '0 0' -m bbb b.patch <<EOF
+y
+y
+y
+y
+EOF
+
+echo
+echo % "after qrecord b.patch 'tip'"
+hg tip -p
+echo
+echo % "after qrecord b.patch 'diff'"
+hg diff --nodates
+
+echo
+echo % --- end ---
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-qrecord.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,207 @@
+% help (no mq, so no qrecord)
+hg: unknown command 'qrecord'
+Mercurial Distributed SCM
+
+basic commands:
+
+ add        add the specified files on the next commit
+ annotate   show changeset information per file line
+ clone      make a copy of an existing repository
+ commit     commit the specified files or all outstanding changes
+ diff       diff repository (or selected files)
+ export     dump the header and diffs for one or more changesets
+ init       create a new repository in the given directory
+ log        show revision history of entire repository or files
+ merge      merge working directory with another revision
+ parents    show the parents of the working dir or revision
+ pull       pull changes from the specified source
+ push       push changes to the specified destination
+ remove     remove the specified files on the next commit
+ serve      export the repository via HTTP
+ status     show changed files in the working directory
+ update     update working directory
+
+use "hg help" for the full list of commands or "hg -v" for details
+% help (mq present)
+hg qrecord [OPTION]... PATCH [FILE]...
+
+interactively record a new patch
+
+    see 'hg help qnew' & 'hg help record' for more information and usage
+
+options:
+
+ -e --edit         edit commit message
+ -g --git          use git extended diff format
+ -I --include      include names matching the given patterns
+ -X --exclude      exclude names matching the given patterns
+ -m --message      use <text> as commit message
+ -l --logfile      read commit message from <file>
+ -U --currentuser  add "From: <current user>" to patch
+ -u --user         add "From: <given user>" to patch
+ -D --currentdate  add "Date: <current date>" to patch
+ -d --date         add "Date: <given date>" to patch
+
+use "hg -v help qrecord" to show global options
+% base commit
+% changing files
+% whole diff
+diff -r 1057167b20ef 1.txt
+--- a/1.txt
++++ b/1.txt
+@@ -1,5 +1,5 @@
+ 1
+-2
++2 2
+ 3
+-4
++4 4
+ 5
+diff -r 1057167b20ef 2.txt
+--- a/2.txt
++++ b/2.txt
+@@ -1,5 +1,5 @@
+ a
+-b
++b b
+ c
+ d
+ e
+diff -r 1057167b20ef dir/a.txt
+--- a/dir/a.txt
++++ b/dir/a.txt
+@@ -1,4 +1,4 @@
+-hello world
++hello world!
+ 
+ someone
+ up
+% qrecord a.patch
+diff --git a/1.txt b/1.txt
+2 hunks, 4 lines changed
+examine changes to '1.txt'? [Ynsfdaq?]  @@ -1,3 +1,3 @@
+ 1
+-2
++2 2
+ 3
+record this change to '1.txt'? [Ynsfdaq?]  @@ -3,3 +3,3 @@
+ 3
+-4
++4 4
+ 5
+record this change to '1.txt'? [Ynsfdaq?]  diff --git a/2.txt b/2.txt
+1 hunks, 2 lines changed
+examine changes to '2.txt'? [Ynsfdaq?]  @@ -1,5 +1,5 @@
+ a
+-b
++b b
+ c
+ d
+ e
+record this change to '2.txt'? [Ynsfdaq?]  diff --git a/dir/a.txt b/dir/a.txt
+1 hunks, 2 lines changed
+examine changes to 'dir/a.txt'? [Ynsfdaq?]  
+% after qrecord a.patch 'tip'
+changeset:   1:5d1ca63427ee
+tag:         qtip
+tag:         tip
+tag:         a.patch
+tag:         qbase
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     aaa
+
+diff -r 1057167b20ef -r 5d1ca63427ee 1.txt
+--- a/1.txt	Thu Jan 01 00:00:00 1970 +0000
++++ b/1.txt	Thu Jan 01 00:00:00 1970 +0000
+@@ -1,5 +1,5 @@
+ 1
+-2
++2 2
+ 3
+ 4
+ 5
+diff -r 1057167b20ef -r 5d1ca63427ee 2.txt
+--- a/2.txt	Thu Jan 01 00:00:00 1970 +0000
++++ b/2.txt	Thu Jan 01 00:00:00 1970 +0000
+@@ -1,5 +1,5 @@
+ a
+-b
++b b
+ c
+ d
+ e
+
+
+% after qrecord a.patch 'diff'
+diff -r 5d1ca63427ee 1.txt
+--- a/1.txt
++++ b/1.txt
+@@ -1,5 +1,5 @@
+ 1
+ 2 2
+ 3
+-4
++4 4
+ 5
+diff -r 5d1ca63427ee dir/a.txt
+--- a/dir/a.txt
++++ b/dir/a.txt
+@@ -1,4 +1,4 @@
+-hello world
++hello world!
+ 
+ someone
+ up
+% qrecord b.patch
+diff --git a/1.txt b/1.txt
+1 hunks, 2 lines changed
+examine changes to '1.txt'? [Ynsfdaq?]  @@ -1,5 +1,5 @@
+ 1
+ 2 2
+ 3
+-4
++4 4
+ 5
+record this change to '1.txt'? [Ynsfdaq?]  diff --git a/dir/a.txt b/dir/a.txt
+1 hunks, 2 lines changed
+examine changes to 'dir/a.txt'? [Ynsfdaq?]  @@ -1,4 +1,4 @@
+-hello world
++hello world!
+ 
+ someone
+ up
+record this change to 'dir/a.txt'? [Ynsfdaq?]  
+% after qrecord b.patch 'tip'
+changeset:   2:b056198bf878
+tag:         qtip
+tag:         tip
+tag:         b.patch
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     bbb
+
+diff -r 5d1ca63427ee -r b056198bf878 1.txt
+--- a/1.txt	Thu Jan 01 00:00:00 1970 +0000
++++ b/1.txt	Thu Jan 01 00:00:00 1970 +0000
+@@ -1,5 +1,5 @@
+ 1
+ 2 2
+ 3
+-4
++4 4
+ 5
+diff -r 5d1ca63427ee -r b056198bf878 dir/a.txt
+--- a/dir/a.txt	Thu Jan 01 00:00:00 1970 +0000
++++ b/dir/a.txt	Thu Jan 01 00:00:00 1970 +0000
+@@ -1,4 +1,4 @@
+-hello world
++hello world!
+ 
+ someone
+ up
+
+
+% after qrecord b.patch 'diff'
+
+% --- end ---
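
test-qrecord (and test-record below) answer the interactive hunk prompts by piping a fixed list of responses into the command with a shell here-document, while [ui] interactive=true keeps the prompts enabled even though stdin is not a terminal. The same driving technique from Python, with a stand-in command rather than hg itself:

    import subprocess
    import sys

    # Feed scripted answers to an interactive command; 'cat' stands in for
    # 'hg qrecord' here purely for illustration.
    answers = 'y\ny\nn\ny\ny\nn\n'
    p = subprocess.Popen(['cat'], stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE, universal_newlines=True)
    out, _ = p.communicate(answers)
    sys.stdout.write(out)
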
--- a/tests/test-rawcommit1.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-rawcommit1.out	Wed Feb 06 19:57:52 2008 -0800
@@ -1,9 +1,9 @@
-05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644 a
-54837d97f2932a8194e69745a280a2c11e61ff9c 644 b
+05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644   a
+54837d97f2932a8194e69745a280a2c11e61ff9c 644   b
 (the rawcommit command is deprecated)
-05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644 a
-54837d97f2932a8194e69745a280a2c11e61ff9c 644 b
-76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
+05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644   a
+54837d97f2932a8194e69745a280a2c11e61ff9c 644   b
+76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644   c
 changeset:   2:e110db3db549
 tag:         tip
 user:        test
@@ -14,8 +14,8 @@
 
 
 (the rawcommit command is deprecated)
-05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644 a
-76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
+05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644   a
+76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644   c
 changeset:   3:20652cf30cc0
 tag:         tip
 user:        test
@@ -26,8 +26,8 @@
 
 
 (the rawcommit command is deprecated)
-d6e3c4976c13feb1728cd3ac851abaf7256a5c23 644 a
-76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
+d6e3c4976c13feb1728cd3ac851abaf7256a5c23 644   a
+76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644   c
 changeset:   4:42556b925639
 tag:         tip
 user:        test
@@ -38,9 +38,9 @@
 
 
 (the rawcommit command is deprecated)
-05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644 a
-54837d97f2932a8194e69745a280a2c11e61ff9c 644 b
-3570202ceac2b52517df64ebd0a062cb0d8fe33a 644 c
+05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644   a
+54837d97f2932a8194e69745a280a2c11e61ff9c 644   b
+3570202ceac2b52517df64ebd0a062cb0d8fe33a 644   c
 changeset:   4:42556b925639
 user:        test
 date:        Mon Jan 12 13:46:40 1970 +0000
@@ -50,8 +50,8 @@
 
 
 (the rawcommit command is deprecated)
-d6e3c4976c13feb1728cd3ac851abaf7256a5c23 644 a
-76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
+d6e3c4976c13feb1728cd3ac851abaf7256a5c23 644   a
+76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644   c
 changeset:   6:8a0c9254b0ab
 tag:         tip
 parent:      4:42556b925639
@@ -63,8 +63,8 @@
 
 
 (the rawcommit command is deprecated)
-d6e3c4976c13feb1728cd3ac851abaf7256a5c23 644 a
-76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
+d6e3c4976c13feb1728cd3ac851abaf7256a5c23 644   a
+76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644   c
 changeset:   7:a5a6e1f312b9
 tag:         tip
 user:        test
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebuildstate	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,24 @@
+#!/bin/sh
+# basic test for hg debugrebuildstate
+
+hg init repo
+cd repo
+
+touch foo bar
+hg ci -Am 'add foo bar'
+
+touch baz
+hg add baz
+hg rm bar
+
+echo '% state dump'
+hg debugstate | cut -b 1-16,37- | sort
+echo '% status'
+hg st -A
+
+hg debugrebuildstate
+echo '% state dump'
+hg debugstate | cut -b 1-16,37- | sort
+echo '% status'
+hg st -A
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebuildstate.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,17 @@
+adding bar
+adding foo
+% state dump
+a   0         -1 baz
+n 644          0 foo
+r   0          0 bar
+% status
+A baz
+R bar
+C foo
+% state dump
+n 666         -1 bar
+n 666         -1 foo
+% status
+! bar
+? baz
+C foo
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-record	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,266 @@
+#!/bin/sh
+
+echo "[ui]" >> $HGRCPATH
+echo "interactive=true" >> $HGRCPATH
+echo "[extensions]" >> $HGRCPATH
+echo "record=" >> $HGRCPATH
+
+echo % help
+
+hg help record
+
+hg init a
+cd a
+
+echo % select no files
+
+touch empty-rw
+hg add empty-rw
+hg record empty-rw<<EOF
+n
+EOF
+echo; hg tip -p
+
+echo % select files but no hunks
+
+hg record empty-rw<<EOF
+y
+n
+EOF
+echo; hg tip -p
+
+echo % record empty file
+
+hg record -d '0 0' -m empty empty-rw<<EOF
+y
+y
+EOF
+echo; hg tip -p
+
+echo % rename empty file
+
+hg mv empty-rw empty-rename
+hg record -d '1 0' -m rename<<EOF
+y
+EOF
+echo; hg tip -p
+
+echo % copy empty file
+
+hg cp empty-rename empty-copy
+hg record -d '2 0' -m copy<<EOF
+y
+EOF
+echo; hg tip -p
+
+echo % delete empty file
+
+hg rm empty-copy
+hg record -d '3 0' -m delete<<EOF
+y
+EOF
+echo; hg tip -p
+
+echo % add binary file
+
+hg bundle --base -2 tip.bundle
+hg add tip.bundle
+hg record -d '4 0' -m binary<<EOF
+y
+EOF
+echo; hg tip -p
+
+echo % change binary file
+
+hg bundle --base -2 tip.bundle
+hg record -d '5 0' -m binary-change<<EOF
+y
+EOF
+echo; hg tip -p
+
+echo % rename and change binary file
+
+hg mv tip.bundle top.bundle
+hg bundle --base -2 top.bundle
+hg record -d '6 0' -m binary-change-rename<<EOF
+y
+EOF
+echo; hg tip -p
+
+echo % add plain file
+
+for i in 1 2 3 4 5 6 7 8 9 10; do
+    echo $i >> plain
+done
+
+hg add plain
+hg record -d '7 0' -m plain plain<<EOF
+y
+y
+EOF
+echo; hg tip -p
+
+echo % modify end of plain file
+
+echo 11 >> plain
+hg record -d '8 0' -m end plain <<EOF
+y
+y
+EOF
+
+echo % modify end of plain file, no EOL
+
+hg tip --template '{node}' >> plain
+hg record -d '9 0' -m noeol plain <<EOF
+y
+y
+EOF
+
+echo % modify end of plain file, add EOL
+
+echo >> plain
+hg record -d '10 0' -m eol plain <<EOF
+y
+y
+y
+EOF
+
+echo % modify beginning, trim end, record both
+
+rm plain
+for i in 2 2 3 4 5 6 7 8 9 10; do
+  echo $i >> plain
+done
+
+hg record -d '10 0' -m begin-and-end plain <<EOF
+y
+y
+y
+EOF
+echo; hg tip -p
+
+echo % trim beginning, modify end
+
+rm plain
+for i in 4 5 6 7 8 9 10.new; do
+  echo $i >> plain
+done
+
+echo % record end
+
+hg record -d '11 0' -m end-only plain <<EOF
+y
+n
+y
+EOF
+echo; hg tip -p
+
+echo % record beginning
+
+hg record -d '12 0' -m begin-only plain <<EOF
+y
+y
+EOF
+echo; hg tip -p
+
+echo % add to beginning, trim from end
+
+rm plain
+for i in 1 2 3 4 5 6 7 8 9; do
+  echo $i >> plain
+done
+
+echo % record end
+
+hg record --traceback -d '13 0' -m end-again plain<<EOF
+y
+n
+y
+EOF
+
+echo % add to beginning, middle, end
+
+rm plain
+for i in 1 2 3 4 5 5.new 5.reallynew 6 7 8 9 10 11; do
+  echo $i >> plain
+done
+
+echo % record beginning, middle
+
+hg record -d '14 0' -m middle-only plain <<EOF
+y
+y
+y
+n
+EOF
+echo; hg tip -p
+
+echo % record end
+
+hg record -d '15 0' -m end-only plain <<EOF
+y
+y
+EOF
+echo; hg tip -p
+
+mkdir subdir
+cd subdir
+echo a > a
+hg ci -d '16 0' -Amsubdir
+
+echo a >> a
+hg record -d '16 0' -m subdir-change a <<EOF
+y
+y
+EOF
+echo; hg tip -p
+
+echo a > f1
+echo b > f2
+hg add f1 f2
+
+hg ci -mz -d '17 0'
+
+echo a >> f1
+echo b >> f2
+
+echo % help, quit
+
+hg record <<EOF
+?
+q
+EOF
+
+echo % skip
+
+hg record <<EOF
+s
+EOF
+
+echo % no
+
+hg record <<EOF
+n
+EOF
+
+echo % f, quit
+
+hg record <<EOF
+f
+q
+EOF
+
+echo % s, all
+
+hg record -d '18 0' -mx <<EOF
+s
+a
+EOF
+echo; hg tip -p
+
+echo % f
+
+hg record -d '19 0' -my <<EOF
+f
+EOF
+echo; hg tip -p
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-record.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,493 @@
+% help
+hg record [OPTION]... [FILE]...
+
+interactively select changes to commit
+
+    If a list of files is omitted, all changes reported by "hg status"
+    will be candidates for recording.
+
+    You will be prompted for whether to record changes to each
+    modified file, and for files with multiple changes, for each
+    change to use.  For each query, the following responses are
+    possible:
+
+    y - record this change
+    n - skip this change
+
+    s - skip remaining changes to this file
+    f - record remaining changes to this file
+
+    d - done, skip remaining changes and files
+    a - record all changes to all remaining files
+    q - quit, recording no changes
+
+    ? - display help
+
+options:
+
+ -A --addremove  mark new/missing files as added/removed before committing
+ -I --include    include names matching the given patterns
+ -X --exclude    exclude names matching the given patterns
+ -m --message    use <text> as commit message
+ -l --logfile    read commit message from <file>
+ -d --date       record datecode as commit date
+ -u --user       record user as committer
+
+use "hg -v help record" to show global options
+% select no files
+diff --git a/empty-rw b/empty-rw
+new file mode 100644
+examine changes to 'empty-rw'? [Ynsfdaq?]  no changes to record
+
+changeset:   -1:000000000000
+tag:         tip
+user:        
+date:        Thu Jan 01 00:00:00 1970 +0000
+
+
+% select files but no hunks
+diff --git a/empty-rw b/empty-rw
+new file mode 100644
+examine changes to 'empty-rw'? [Ynsfdaq?]  transaction abort!
+rollback completed
+abort: empty commit message
+
+changeset:   -1:000000000000
+tag:         tip
+user:        
+date:        Thu Jan 01 00:00:00 1970 +0000
+
+
+% record empty file
+diff --git a/empty-rw b/empty-rw
+new file mode 100644
+examine changes to 'empty-rw'? [Ynsfdaq?]  
+changeset:   0:c0708cf4e46e
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     empty
+
+
+% rename empty file
+diff --git a/empty-rw b/empty-rename
+rename from empty-rw
+rename to empty-rename
+examine changes to 'empty-rw' and 'empty-rename'? [Ynsfdaq?]  
+changeset:   1:df251d174da3
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:01 1970 +0000
+summary:     rename
+
+
+% copy empty file
+diff --git a/empty-rename b/empty-copy
+copy from empty-rename
+copy to empty-copy
+examine changes to 'empty-rename' and 'empty-copy'? [Ynsfdaq?]  
+changeset:   2:b63ea3939f8d
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:02 1970 +0000
+summary:     copy
+
+
+% delete empty file
+diff --git a/empty-copy b/empty-copy
+deleted file mode 100644
+examine changes to 'empty-copy'? [Ynsfdaq?]  
+changeset:   3:a2546574bce9
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:03 1970 +0000
+summary:     delete
+
+
+% add binary file
+1 changesets found
+diff --git a/tip.bundle b/tip.bundle
+new file mode 100644
+this is a binary file
+examine changes to 'tip.bundle'? [Ynsfdaq?]  
+changeset:   4:9e998a545a8b
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:04 1970 +0000
+summary:     binary
+
+diff -r a2546574bce9 -r 9e998a545a8b tip.bundle
+Binary file tip.bundle has changed
+
+% change binary file
+1 changesets found
+diff --git a/tip.bundle b/tip.bundle
+this modifies a binary file (all or nothing)
+examine changes to 'tip.bundle'? [Ynsfdaq?]  
+changeset:   5:93d05561507d
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:05 1970 +0000
+summary:     binary-change
+
+diff -r 9e998a545a8b -r 93d05561507d tip.bundle
+Binary file tip.bundle has changed
+
+% rename and change binary file
+1 changesets found
+diff --git a/tip.bundle b/top.bundle
+rename from tip.bundle
+rename to top.bundle
+this modifies a binary file (all or nothing)
+examine changes to 'tip.bundle' and 'top.bundle'? [Ynsfdaq?]  
+changeset:   6:699cc1bea9aa
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:06 1970 +0000
+summary:     binary-change-rename
+
+diff -r 93d05561507d -r 699cc1bea9aa tip.bundle
+Binary file tip.bundle has changed
+diff -r 93d05561507d -r 699cc1bea9aa top.bundle
+Binary file top.bundle has changed
+
+% add plain file
+diff --git a/plain b/plain
+new file mode 100644
+examine changes to 'plain'? [Ynsfdaq?]  
+changeset:   7:118ed744216b
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:07 1970 +0000
+summary:     plain
+
+diff -r 699cc1bea9aa -r 118ed744216b plain
+--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
++++ b/plain	Thu Jan 01 00:00:07 1970 +0000
+@@ -0,0 +1,10 @@
++1
++2
++3
++4
++5
++6
++7
++8
++9
++10
+
+% modify end of plain file
+diff --git a/plain b/plain
+1 hunks, 1 lines changed
+examine changes to 'plain'? [Ynsfdaq?]  @@ -8,3 +8,4 @@
+ 8
+ 9
+ 10
++11
+record this change to 'plain'? [Ynsfdaq?]  % modify end of plain file, no EOL
+diff --git a/plain b/plain
+1 hunks, 1 lines changed
+examine changes to 'plain'? [Ynsfdaq?]  @@ -9,3 +9,4 @@
+ 9
+ 10
+ 11
++cf81a2760718a74d44c0c2eecb72f659e63a69c5
+\ No newline at end of file
+record this change to 'plain'? [Ynsfdaq?]  % modify end of plain file, add EOL
+diff --git a/plain b/plain
+1 hunks, 2 lines changed
+examine changes to 'plain'? [Ynsfdaq?]  @@ -9,4 +9,4 @@
+ 9
+ 10
+ 11
+-cf81a2760718a74d44c0c2eecb72f659e63a69c5
+\ No newline at end of file
++cf81a2760718a74d44c0c2eecb72f659e63a69c5
+record this change to 'plain'? [Ynsfdaq?]  % modify beginning, trim end, record both
+diff --git a/plain b/plain
+2 hunks, 4 lines changed
+examine changes to 'plain'? [Ynsfdaq?]  @@ -1,4 +1,4 @@
+-1
++2
+ 2
+ 3
+ 4
+record this change to 'plain'? [Ynsfdaq?]  @@ -8,5 +8,3 @@
+ 8
+ 9
+ 10
+-11
+-cf81a2760718a74d44c0c2eecb72f659e63a69c5
+record this change to 'plain'? [Ynsfdaq?]  
+changeset:   11:d09ab1967dab
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:10 1970 +0000
+summary:     begin-and-end
+
+diff -r e2ecd9b0b78d -r d09ab1967dab plain
+--- a/plain	Thu Jan 01 00:00:10 1970 +0000
++++ b/plain	Thu Jan 01 00:00:10 1970 +0000
+@@ -1,4 +1,4 @@
+-1
++2
+ 2
+ 3
+ 4
+@@ -8,5 +8,3 @@
+ 8
+ 9
+ 10
+-11
+-cf81a2760718a74d44c0c2eecb72f659e63a69c5
+
+% trim beginning, modify end
+% record end
+diff --git a/plain b/plain
+2 hunks, 5 lines changed
+examine changes to 'plain'? [Ynsfdaq?]  @@ -1,9 +1,6 @@
+-2
+-2
+-3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+record this change to 'plain'? [Ynsfdaq?]  @@ -4,7 +1,7 @@
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+-10
++10.new
+record this change to 'plain'? [Ynsfdaq?]  
+changeset:   12:44516c9708ae
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:11 1970 +0000
+summary:     end-only
+
+diff -r d09ab1967dab -r 44516c9708ae plain
+--- a/plain	Thu Jan 01 00:00:10 1970 +0000
++++ b/plain	Thu Jan 01 00:00:11 1970 +0000
+@@ -7,4 +7,4 @@
+ 7
+ 8
+ 9
+-10
++10.new
+
+% record beginning
+diff --git a/plain b/plain
+1 hunks, 3 lines changed
+examine changes to 'plain'? [Ynsfdaq?]  @@ -1,6 +1,3 @@
+-2
+-2
+-3
+ 4
+ 5
+ 6
+record this change to 'plain'? [Ynsfdaq?]  
+changeset:   13:3ebbace64a8d
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:12 1970 +0000
+summary:     begin-only
+
+diff -r 44516c9708ae -r 3ebbace64a8d plain
+--- a/plain	Thu Jan 01 00:00:11 1970 +0000
++++ b/plain	Thu Jan 01 00:00:12 1970 +0000
+@@ -1,6 +1,3 @@
+-2
+-2
+-3
+ 4
+ 5
+ 6
+
+% add to beginning, trim from end
+% record end
+diff --git a/plain b/plain
+2 hunks, 4 lines changed
+examine changes to 'plain'? [Ynsfdaq?]  @@ -1,6 +1,9 @@
++1
++2
++3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+record this change to 'plain'? [Ynsfdaq?]  @@ -1,7 +4,6 @@
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+-10.new
+record this change to 'plain'? [Ynsfdaq?]  % add to beginning, middle, end
+% record beginning, middle
+diff --git a/plain b/plain
+3 hunks, 7 lines changed
+examine changes to 'plain'? [Ynsfdaq?]  @@ -1,2 +1,5 @@
++1
++2
++3
+ 4
+ 5
+record this change to 'plain'? [Ynsfdaq?]  @@ -1,6 +4,8 @@
+ 4
+ 5
++5.new
++5.reallynew
+ 6
+ 7
+ 8
+ 9
+record this change to 'plain'? [Ynsfdaq?]  @@ -3,4 +8,6 @@
+ 6
+ 7
+ 8
+ 9
++10
++11
+record this change to 'plain'? [Ynsfdaq?]  
+changeset:   15:c1c639d8b268
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:14 1970 +0000
+summary:     middle-only
+
+diff -r efc0dad7bd9f -r c1c639d8b268 plain
+--- a/plain	Thu Jan 01 00:00:13 1970 +0000
++++ b/plain	Thu Jan 01 00:00:14 1970 +0000
+@@ -1,5 +1,10 @@
++1
++2
++3
+ 4
+ 5
++5.new
++5.reallynew
+ 6
+ 7
+ 8
+
+% record end
+diff --git a/plain b/plain
+1 hunks, 2 lines changed
+examine changes to 'plain'? [Ynsfdaq?]  @@ -9,3 +9,5 @@
+ 7
+ 8
+ 9
++10
++11
+record this change to 'plain'? [Ynsfdaq?]  
+changeset:   16:80b74bbc7808
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:15 1970 +0000
+summary:     end-only
+
+diff -r c1c639d8b268 -r 80b74bbc7808 plain
+--- a/plain	Thu Jan 01 00:00:14 1970 +0000
++++ b/plain	Thu Jan 01 00:00:15 1970 +0000
+@@ -9,3 +9,5 @@
+ 7
+ 8
+ 9
++10
++11
+
+adding subdir/a
+diff --git a/subdir/a b/subdir/a
+1 hunks, 1 lines changed
+examine changes to 'subdir/a'? [Ynsfdaq?]  @@ -1,1 +1,2 @@
+ a
++a
+record this change to 'subdir/a'? [Ynsfdaq?]  
+changeset:   18:33ff5c4fb017
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:16 1970 +0000
+summary:     subdir-change
+
+diff -r aecf2b2ea83c -r 33ff5c4fb017 subdir/a
+--- a/subdir/a	Thu Jan 01 00:00:16 1970 +0000
++++ b/subdir/a	Thu Jan 01 00:00:16 1970 +0000
+@@ -1,1 +1,2 @@
+ a
++a
+
+% help, quit
+diff --git a/subdir/f1 b/subdir/f1
+1 hunks, 1 lines changed
+examine changes to 'subdir/f1'? [Ynsfdaq?]  y - record this change
+n - skip this change
+s - skip remaining changes to this file
+f - record remaining changes to this file
+d - done, skip remaining changes and files
+a - record all changes to all remaining files
+q - quit, recording no changes
+? - display help
+examine changes to 'subdir/f1'? [Ynsfdaq?]  abort: user quit
+% skip
+diff --git a/subdir/f1 b/subdir/f1
+1 hunks, 1 lines changed
+examine changes to 'subdir/f1'? [Ynsfdaq?]  diff --git a/subdir/f2 b/subdir/f2
+1 hunks, 1 lines changed
+examine changes to 'subdir/f2'? [Ynsfdaq?]  abort: response expected
+% no
+diff --git a/subdir/f1 b/subdir/f1
+1 hunks, 1 lines changed
+examine changes to 'subdir/f1'? [Ynsfdaq?]  diff --git a/subdir/f2 b/subdir/f2
+1 hunks, 1 lines changed
+examine changes to 'subdir/f2'? [Ynsfdaq?]  abort: response expected
+% f, quit
+diff --git a/subdir/f1 b/subdir/f1
+1 hunks, 1 lines changed
+examine changes to 'subdir/f1'? [Ynsfdaq?]  diff --git a/subdir/f2 b/subdir/f2
+1 hunks, 1 lines changed
+examine changes to 'subdir/f2'? [Ynsfdaq?]  abort: user quit
+% s, all
+diff --git a/subdir/f1 b/subdir/f1
+1 hunks, 1 lines changed
+examine changes to 'subdir/f1'? [Ynsfdaq?]  diff --git a/subdir/f2 b/subdir/f2
+1 hunks, 1 lines changed
+examine changes to 'subdir/f2'? [Ynsfdaq?]  
+changeset:   20:094183e04b7c
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:18 1970 +0000
+summary:     x
+
+diff -r f9e855cd9374 -r 094183e04b7c subdir/f2
+--- a/subdir/f2	Thu Jan 01 00:00:17 1970 +0000
++++ b/subdir/f2	Thu Jan 01 00:00:18 1970 +0000
+@@ -1,1 +1,2 @@
+ b
++b
+
+% f
+diff --git a/subdir/f1 b/subdir/f1
+1 hunks, 1 lines changed
+examine changes to 'subdir/f1'? [Ynsfdaq?]  
+changeset:   21:38164785b0ef
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:19 1970 +0000
+summary:     y
+
+diff -r 094183e04b7c -r 38164785b0ef subdir/f1
+--- a/subdir/f1	Thu Jan 01 00:00:18 1970 +0000
++++ b/subdir/f1	Thu Jan 01 00:00:19 1970 +0000
+@@ -1,1 +1,2 @@
+ a
++a
+
--- a/tests/test-rename	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-rename	Wed Feb 06 19:57:52 2008 -0800
@@ -88,6 +88,11 @@
 diff d1/b d2/b
 hg update -C
 
+echo "# attempt to move one file into a non-existent directory"
+hg rename d1/a dx/
+hg status -C
+hg update -C
+
 echo "# attempt to move potentially more than one file into a non-existent"
 echo "# directory"
 hg rename 'glob:d1/**' dx
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rename-after-merge	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,34 @@
+#!/bin/sh
+
+# Test issue 746: renaming files brought by the
+# second parent of a merge was broken.
+
+echo % create source repository
+hg init t
+cd t
+echo a > a
+hg ci -Am a
+cd ..
+
+echo % fork source repository
+hg clone t t2
+cd t2
+echo b > b
+hg ci -Am b
+
+echo % update source repository
+cd ../t
+echo a >> a
+hg ci -m a2
+
+echo % merge repositories
+hg pull ../t2
+hg merge
+hg st
+
+echo % rename b as c
+hg mv b c
+hg st
+echo % rename back c as b
+hg mv c b
+hg st
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rename-after-merge.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,22 @@
+% create source repository
+adding a
+% fork source repository
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+adding b
+% update source repository
+% merge repositories
+pulling from ../t2
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+M b
+% rename b as c
+A c
+R b
+% rename back c as b
+M b
--- a/tests/test-rename-dir-merge	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-rename-dir-merge	Wed Feb 06 19:57:52 2008 -0800
@@ -7,9 +7,7 @@
 mkdir a
 echo foo > a/a
 echo bar > a/b
-
-hg add a
-hg ci -m "0" -d "0 0"
+hg ci -Am "0" -d "0 0"
 
 hg co -C 0
 hg mv a b
@@ -17,6 +15,7 @@
 
 hg co -C 0
 echo baz > a/c
+echo quux > a/d
 hg add a/c
 hg ci -m "2 add a/c" -d "0 0"
 
@@ -24,9 +23,11 @@
 echo a/* b/*
 hg st -C
 hg ci -m "3 merge 2+1" -d "0 0"
+hg debugrename b/c
 
 hg co -C 1
 hg merge --debug 2
 echo a/* b/*
 hg st -C
 hg ci -m "4 merge 1+2" -d "0 0"
+hg debugrename b/c
--- a/tests/test-rename-dir-merge.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-rename-dir-merge.out	Wed Feb 06 19:57:52 2008 -0800
@@ -1,14 +1,27 @@
 adding a/a
 adding a/b
 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-copying a/a to b/a
-copying a/b to b/b
-removing a/a
-removing a/b
+moving a/a to b/a
+moving a/b to b/b
 2 files updated, 0 files merged, 2 files removed, 0 files unresolved
 resolving manifests
  overwrite None partial False
  ancestor f9b20c0d4c51 local ce36d17b18fb+ remote 55119e611c80
+  searching for copies back to rev 1
+  unmatched files in local:
+   a/c
+   a/d
+  unmatched files in other:
+   b/a
+   b/b
+  all copies found (* = to merge, ! = divergent):
+   b/a -> a/a 
+   b/b -> a/b 
+  checking for directory renames
+  dir a/ -> b/
+  file a/c -> b/c
+  file a/d -> b/d
+ a/d: remote renamed directory to b/d -> d
  a/c: remote renamed directory to b/c -> d
  a/b: other deleted -> r
  a/a: other deleted -> r
@@ -17,11 +30,12 @@
 removing a/a
 removing a/b
 moving a/c to b/c
+moving a/d to b/d
 getting b/a
 getting b/b
-3 files updated, 0 files merged, 2 files removed, 0 files unresolved
+4 files updated, 0 files merged, 2 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
-a/* b/a b/b b/c
+a/* b/a b/b b/c b/d
 M b/a
 M b/b
 A b/c
@@ -29,14 +43,31 @@
 R a/a
 R a/b
 R a/c
+? b/d
+b/c renamed from a/c:354ae8da6e890359ef49ade27b68bbc361f3ca88
 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
 resolving manifests
  overwrite None partial False
  ancestor f9b20c0d4c51 local 55119e611c80+ remote ce36d17b18fb
+  searching for copies back to rev 1
+  unmatched files in local:
+   b/a
+   b/b
+   b/d
+  unmatched files in other:
+   a/c
+  all copies found (* = to merge, ! = divergent):
+   b/a -> a/a 
+   b/b -> a/b 
+  checking for directory renames
+  dir a/ -> b/
+  file a/c -> b/c
  None: local renamed directory to b/c -> d
 getting a/c to b/c
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
-a/* b/a b/b b/c
+a/* b/a b/b b/c b/d
 A b/c
   a/c
+? b/d
+b/c renamed from a/c:354ae8da6e890359ef49ade27b68bbc361f3ca88
--- a/tests/test-rename-dir-merge2.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-rename-dir-merge2.out	Wed Feb 06 19:57:52 2008 -0800
@@ -1,7 +1,6 @@
 adding a/f
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-copying a/f to b/f
-removing a/f
+moving a/f to b/f
 adding a/aa/g
 pulling from ../r2
 searching for changes
--- a/tests/test-rename-merge1.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-rename-merge1.out	Wed Feb 06 19:57:52 2008 -0800
@@ -4,12 +4,25 @@
 resolving manifests
  overwrite None partial False
  ancestor af1939970a1c local f26ec4fc3fa3+ remote 8e765a822af2
+  searching for copies back to rev 1
+  unmatched files in local:
+   c2
+  unmatched files in other:
+   b
+   b2
+  all copies found (* = to merge, ! = divergent):
+   c2 -> a2 !
+   b -> a *
+   b2 -> a2 !
+  checking for directory renames
  a2: divergent renames -> dr
  a: remote moved to b -> m
  b2: remote created -> g
+copying a to b
+picked tool 'internal:merge' for a (binary False symlink False)
 merging a and b
 my a@f26ec4fc3fa3+ other b@8e765a822af2 ancestor a@af1939970a1c
-copying a to b
+ premerge successful
 removing a
 warning: detected divergent renames of a2 to:
  c2
--- a/tests/test-rename-merge2.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-rename-merge2.out	Wed Feb 06 19:57:52 2008 -0800
@@ -4,19 +4,28 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local e300d1c794ec+ remote 735846fee2d7
+  searching for copies back to rev 1
+  unmatched files in other:
+   b
+  all copies found (* = to merge, ! = divergent):
+   b -> a *
+  checking for directory renames
  rev: versions differ -> m
  a: remote copied to b -> m
+copying a to b
+picked tool 'python ../merge' for a (binary False symlink False)
 merging a and b
 my a@e300d1c794ec+ other b@735846fee2d7 ancestor a@924404dff337
-copying a to b
+ premerge successful
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@e300d1c794ec+ other rev@735846fee2d7 ancestor rev@924404dff337
 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 --------------
-M a
 M b
   a
+C a
 --------------
 
 --------------
@@ -25,12 +34,21 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local ac809aeed39a+ remote f4db7e329e71
+  searching for copies back to rev 1
+  unmatched files in local:
+   b
+  all copies found (* = to merge, ! = divergent):
+   b -> a *
+  checking for directory renames
  a: remote is newer -> g
  b: local copied to a -> m
  rev: versions differ -> m
 getting a
+picked tool 'python ../merge' for b (binary False symlink False)
 merging b and a
 my b@ac809aeed39a+ other a@f4db7e329e71 ancestor a@924404dff337
+ premerge successful
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@ac809aeed39a+ other rev@f4db7e329e71 ancestor rev@924404dff337
 1 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -47,12 +65,21 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local e300d1c794ec+ remote e03727d2d66b
+  searching for copies back to rev 1
+  unmatched files in other:
+   b
+  all copies found (* = to merge, ! = divergent):
+   b -> a *
+  checking for directory renames
  rev: versions differ -> m
  a: remote moved to b -> m
+copying a to b
+picked tool 'python ../merge' for a (binary False symlink False)
 merging a and b
 my a@e300d1c794ec+ other b@e03727d2d66b ancestor a@924404dff337
-copying a to b
+ premerge successful
 removing a
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@e300d1c794ec+ other rev@e03727d2d66b ancestor rev@924404dff337
 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -68,10 +95,19 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local ecf3cb2a4219+ remote f4db7e329e71
+  searching for copies back to rev 1
+  unmatched files in local:
+   b
+  all copies found (* = to merge, ! = divergent):
+   b -> a *
+  checking for directory renames
  b: local moved to a -> m
  rev: versions differ -> m
+picked tool 'python ../merge' for b (binary False symlink False)
 merging b and a
 my b@ecf3cb2a4219+ other a@f4db7e329e71 ancestor a@924404dff337
+ premerge successful
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@ecf3cb2a4219+ other rev@f4db7e329e71 ancestor rev@924404dff337
 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -87,9 +123,16 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local 94b33a1b7f2d+ remote 735846fee2d7
+  searching for copies back to rev 1
+  unmatched files in other:
+   b
+  all copies found (* = to merge, ! = divergent):
+   b -> a 
+  checking for directory renames
  rev: versions differ -> m
  b: remote created -> g
 getting b
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@94b33a1b7f2d+ other rev@735846fee2d7 ancestor rev@924404dff337
 1 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -105,7 +148,14 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local ac809aeed39a+ remote 97c705ade336
+  searching for copies back to rev 1
+  unmatched files in local:
+   b
+  all copies found (* = to merge, ! = divergent):
+   b -> a 
+  checking for directory renames
  rev: versions differ -> m
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@ac809aeed39a+ other rev@97c705ade336 ancestor rev@924404dff337
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -121,11 +171,18 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local 94b33a1b7f2d+ remote e03727d2d66b
+  searching for copies back to rev 1
+  unmatched files in other:
+   b
+  all copies found (* = to merge, ! = divergent):
+   b -> a 
+  checking for directory renames
  a: other deleted -> r
  rev: versions differ -> m
  b: remote created -> g
 removing a
 getting b
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@94b33a1b7f2d+ other rev@e03727d2d66b ancestor rev@924404dff337
 1 files updated, 1 files merged, 1 files removed, 0 files unresolved
@@ -140,7 +197,14 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local ecf3cb2a4219+ remote 97c705ade336
+  searching for copies back to rev 1
+  unmatched files in local:
+   b
+  all copies found (* = to merge, ! = divergent):
+   b -> a 
+  checking for directory renames
  rev: versions differ -> m
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@ecf3cb2a4219+ other rev@97c705ade336 ancestor rev@924404dff337
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -155,10 +219,13 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local ec03c2ca8642+ remote 79cc6877a3b7
+  searching for copies back to rev 1
  b: versions differ -> m
  rev: versions differ -> m
+picked tool 'python ../merge' for b (binary False symlink False)
 merging b
 my b@ec03c2ca8642+ other b@79cc6877a3b7 ancestor a@924404dff337
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@ec03c2ca8642+ other rev@79cc6877a3b7 ancestor rev@924404dff337
 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -173,6 +240,15 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local ecf3cb2a4219+ remote e6abcc1a30c2
+  searching for copies back to rev 1
+  unmatched files in local:
+   b
+  unmatched files in other:
+   c
+  all copies found (* = to merge, ! = divergent):
+   c -> a !
+   b -> a !
+  checking for directory renames
  a: divergent renames -> dr
  rev: versions differ -> m
  c: remote created -> g
@@ -180,6 +256,7 @@
  b
  c
 getting c
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@ecf3cb2a4219+ other rev@e6abcc1a30c2 ancestor rev@924404dff337
 1 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -195,10 +272,13 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local ac809aeed39a+ remote af30c7647fc7
+  searching for copies back to rev 1
  b: versions differ -> m
  rev: versions differ -> m
+picked tool 'python ../merge' for b (binary False symlink False)
 merging b
 my b@ac809aeed39a+ other b@af30c7647fc7 ancestor b@000000000000
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@ac809aeed39a+ other rev@af30c7647fc7 ancestor rev@924404dff337
 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -214,12 +294,15 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local 59318016310c+ remote e03727d2d66b
+  searching for copies back to rev 1
  a: other deleted -> r
  b: versions differ -> m
  rev: versions differ -> m
 removing a
+picked tool 'python ../merge' for b (binary False symlink False)
 merging b
 my b@59318016310c+ other b@e03727d2d66b ancestor b@000000000000
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@59318016310c+ other rev@e03727d2d66b ancestor rev@924404dff337
 0 files updated, 2 files merged, 1 files removed, 0 files unresolved
@@ -234,12 +317,15 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local ac809aeed39a+ remote 8dbce441892a
+  searching for copies back to rev 1
  a: remote is newer -> g
  b: versions differ -> m
  rev: versions differ -> m
 getting a
+picked tool 'python ../merge' for b (binary False symlink False)
 merging b
 my b@ac809aeed39a+ other b@8dbce441892a ancestor b@000000000000
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@ac809aeed39a+ other rev@8dbce441892a ancestor rev@924404dff337
 1 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -255,12 +341,15 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local 59318016310c+ remote e03727d2d66b
+  searching for copies back to rev 1
  a: other deleted -> r
  b: versions differ -> m
  rev: versions differ -> m
 removing a
+picked tool 'python ../merge' for b (binary False symlink False)
 merging b
 my b@59318016310c+ other b@e03727d2d66b ancestor b@000000000000
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@59318016310c+ other rev@e03727d2d66b ancestor rev@924404dff337
 0 files updated, 2 files merged, 1 files removed, 0 files unresolved
@@ -275,12 +364,15 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local ac809aeed39a+ remote 8dbce441892a
+  searching for copies back to rev 1
  a: remote is newer -> g
  b: versions differ -> m
  rev: versions differ -> m
 getting a
+picked tool 'python ../merge' for b (binary False symlink False)
 merging b
 my b@ac809aeed39a+ other b@8dbce441892a ancestor b@000000000000
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@ac809aeed39a+ other rev@8dbce441892a ancestor rev@924404dff337
 1 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -296,10 +388,13 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local 0b76e65c8289+ remote 735846fee2d7
+  searching for copies back to rev 1
  b: versions differ -> m
  rev: versions differ -> m
+picked tool 'python ../merge' for b (binary False symlink False)
 merging b
 my b@0b76e65c8289+ other b@735846fee2d7 ancestor b@000000000000
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@0b76e65c8289+ other rev@735846fee2d7 ancestor rev@924404dff337
 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -315,12 +410,15 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local ecf3cb2a4219+ remote 8dbce441892a
+  searching for copies back to rev 1
  b: versions differ -> m
  rev: versions differ -> m
  a: prompt recreating -> g
 getting a
+picked tool 'python ../merge' for b (binary False symlink False)
 merging b
 my b@ecf3cb2a4219+ other b@8dbce441892a ancestor b@000000000000
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@ecf3cb2a4219+ other rev@8dbce441892a ancestor rev@924404dff337
 1 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -336,10 +434,13 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local 0b76e65c8289+ remote e03727d2d66b
+  searching for copies back to rev 1
  b: versions differ -> m
  rev: versions differ -> m
+picked tool 'python ../merge' for b (binary False symlink False)
 merging b
 my b@0b76e65c8289+ other b@e03727d2d66b ancestor b@000000000000
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@0b76e65c8289+ other rev@e03727d2d66b ancestor rev@924404dff337
 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -355,12 +456,20 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local e300d1c794ec+ remote 79cc6877a3b7
+  searching for copies back to rev 1
+  unmatched files in other:
+   b
+  all copies found (* = to merge, ! = divergent):
+   b -> a *
+  checking for directory renames
  rev: versions differ -> m
  a: remote moved to b -> m
+copying a to b
+picked tool 'python ../merge' for a (binary False symlink False)
 merging a and b
 my a@e300d1c794ec+ other b@79cc6877a3b7 ancestor a@924404dff337
-copying a to b
 removing a
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@e300d1c794ec+ other rev@79cc6877a3b7 ancestor rev@924404dff337
 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -376,10 +485,18 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local ec03c2ca8642+ remote f4db7e329e71
+  searching for copies back to rev 1
+  unmatched files in local:
+   b
+  all copies found (* = to merge, ! = divergent):
+   b -> a *
+  checking for directory renames
  b: local moved to a -> m
  rev: versions differ -> m
+picked tool 'python ../merge' for b (binary False symlink False)
 merging b and a
 my b@ec03c2ca8642+ other a@f4db7e329e71 ancestor a@924404dff337
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@ec03c2ca8642+ other rev@f4db7e329e71 ancestor rev@924404dff337
 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -395,12 +512,23 @@
 resolving manifests
  overwrite None partial False
  ancestor 924404dff337 local ecf3cb2a4219+ remote 2b958612230f
+  searching for copies back to rev 1
+  unmatched files in local:
+   b
+  unmatched files in other:
+   c
+  all copies found (* = to merge, ! = divergent):
+   b -> a *
+  checking for directory renames
  b: local moved to a -> m
  rev: versions differ -> m
  c: remote created -> g
+picked tool 'python ../merge' for b (binary False symlink False)
 merging b and a
 my b@ecf3cb2a4219+ other a@2b958612230f ancestor a@924404dff337
+ premerge successful
 getting c
+picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@ecf3cb2a4219+ other rev@2b958612230f ancestor rev@924404dff337
 1 files updated, 2 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-rename.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-rename.out	Wed Feb 06 19:57:52 2008 -0800
@@ -29,14 +29,10 @@
 R d2/b
 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
 # rename directory d1 as d3
-copying d1/a to d3/a
-copying d1/b to d3/b
-copying d1/ba to d3/ba
-copying d1/d11/a1 to d3/d11/a1
-removing d1/a
-removing d1/b
-removing d1/ba
-removing d1/d11/a1
+moving d1/a to d3/a
+moving d1/b to d3/b
+moving d1/ba to d3/ba
+moving d1/d11/a1 to d3/d11/a1
 A d3/a
   d1/a
 A d3/b
@@ -51,14 +47,10 @@
 R d1/d11/a1
 4 files updated, 0 files merged, 4 files removed, 0 files unresolved
 # rename --after directory d1 as d3
-copying d1/a to d3/a
-copying d1/b to d3/b
-copying d1/ba to d3/ba
-copying d1/d11/a1 to d3/d11/a1
-removing d1/a
-removing d1/b
-removing d1/ba
-removing d1/d11/a1
+moving d1/a to d3/a
+moving d1/b to d3/b
+moving d1/ba to d3/ba
+moving d1/d11/a1 to d3/d11/a1
 A d3/a
   d1/a
 A d3/b
@@ -73,37 +65,29 @@
 R d1/d11/a1
 4 files updated, 0 files merged, 4 files removed, 0 files unresolved
 # move a directory using a relative path
-copying ../d1/d11/a1 to d3/d11/a1
-removing ../d1/d11/a1
+moving ../d1/d11/a1 to d3/d11/a1
 A d2/d3/d11/a1
   d1/d11/a1
 R d1/d11/a1
 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
 # move --after a directory using a relative path
-copying ../d1/d11/a1 to d3/d11/a1
-removing ../d1/d11/a1
+moving ../d1/d11/a1 to d3/d11/a1
 A d2/d3/d11/a1
   d1/d11/a1
 R d1/d11/a1
 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
 # move directory d1/d11 to an existing directory d2 (removes empty d1)
-copying d1/d11/a1 to d2/d11/a1
-removing d1/d11/a1
+moving d1/d11/a1 to d2/d11/a1
 A d2/d11/a1
   d1/d11/a1
 R d1/d11/a1
 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
 # move directories d1 and d2 to a new directory d3
-copying d1/a to d3/d1/a
-copying d1/b to d3/d1/b
-copying d1/ba to d3/d1/ba
-copying d1/d11/a1 to d3/d1/d11/a1
-copying d2/b to d3/d2/b
-removing d1/a
-removing d1/b
-removing d1/ba
-removing d1/d11/a1
-removing d2/b
+moving d1/a to d3/d1/a
+moving d1/b to d3/d1/b
+moving d1/ba to d3/d1/ba
+moving d1/d11/a1 to d3/d1/d11/a1
+moving d2/b to d3/d2/b
 A d3/d1/a
   d1/a
 A d3/d1/b
@@ -121,16 +105,11 @@
 R d2/b
 5 files updated, 0 files merged, 5 files removed, 0 files unresolved
 # move --after directories d1 and d2 to a new directory d3
-copying d1/a to d3/d1/a
-copying d1/b to d3/d1/b
-copying d1/ba to d3/d1/ba
-copying d1/d11/a1 to d3/d1/d11/a1
-copying d2/b to d3/d2/b
-removing d1/a
-removing d1/b
-removing d1/ba
-removing d1/d11/a1
-removing d2/b
+moving d1/a to d3/d1/a
+moving d1/b to d3/d1/b
+moving d1/ba to d3/d1/ba
+moving d1/d11/a1 to d3/d1/d11/a1
+moving d2/b to d3/d2/b
 A d3/d1/a
   d1/a
 A d3/d1/b
@@ -150,8 +129,7 @@
 # move everything under directory d1 to existing directory d2, do not
 # overwrite existing files (d2/b)
 d2/b: not overwriting - file exists
-copying d1/d11/a1 to d2/d11/a1
-removing d1/d11/a1
+moving d1/d11/a1 to d2/d11/a1
 A d2/a
   d1/a
 A d2/ba
@@ -166,18 +144,17 @@
 ---
 > d2/b
 3 files updated, 0 files merged, 3 files removed, 0 files unresolved
+# attempt to move one file into a non-existent directory
+abort: destination dx/ is not a directory
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 # attempt to move potentially more than one file into a non-existent
 # directory
 abort: with multiple sources, destination must be an existing directory
 # move every file under d1 to d2/d21 (glob)
-copying d1/a to d2/d21/a
-copying d1/b to d2/d21/b
-copying d1/ba to d2/d21/ba
-copying d1/d11/a1 to d2/d21/a1
-removing d1/a
-removing d1/b
-removing d1/ba
-removing d1/d11/a1
+moving d1/a to d2/d21/a
+moving d1/b to d2/d21/b
+moving d1/ba to d2/d21/ba
+moving d1/d11/a1 to d2/d21/a1
 A d2/d21/a
   d1/a
 A d2/d21/a1
@@ -192,10 +169,8 @@
 R d1/d11/a1
 4 files updated, 0 files merged, 4 files removed, 0 files unresolved
 # move --after some files under d1 to d2/d21 (glob)
-copying d1/a to d2/d21/a
-copying d1/d11/a1 to d2/d21/a1
-removing d1/a
-removing d1/d11/a1
+moving d1/a to d2/d21/a
+moving d1/d11/a1 to d2/d21/a1
 A d2/d21/a
   d1/a
 A d2/d21/a1
@@ -204,10 +179,8 @@
 R d1/d11/a1
 2 files updated, 0 files merged, 2 files removed, 0 files unresolved
 # move every file under d1 starting with an 'a' to d2/d21 (regexp)
-copying d1/a to d2/d21/a
-copying d1/d11/a1 to d2/d21/a1
-removing d1/a
-removing d1/d11/a1
+moving d1/a to d2/d21/a
+moving d1/d11/a1 to d2/d21/a1
 A d2/d21/a
   d1/a
 A d2/d21/a1
@@ -230,9 +203,8 @@
 R d1/ba
 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
 # do not copy more than one source file to the same destination file
-copying d1/d11/a1 to d3/d11/a1
+moving d1/d11/a1 to d3/d11/a1
 d3/b: not overwriting - d2/b collides with d1/b
-removing d1/d11/a1
 A d3/a
   d1/a
 A d3/b
@@ -247,14 +219,10 @@
 R d1/d11/a1
 4 files updated, 0 files merged, 4 files removed, 0 files unresolved
 # move a whole subtree with "hg rename ."
-copying a to ../d3/d1/a
-copying b to ../d3/d1/b
-copying ba to ../d3/d1/ba
-copying d11/a1 to ../d3/d1/d11/a1
-removing a
-removing b
-removing ba
-removing d11/a1
+moving a to ../d3/d1/a
+moving b to ../d3/d1/b
+moving ba to ../d3/d1/ba
+moving d11/a1 to ../d3/d1/d11/a1
 A d3/d1/a
   d1/a
 A d3/d1/b
@@ -269,14 +237,10 @@
 R d1/d11/a1
 4 files updated, 0 files merged, 4 files removed, 0 files unresolved
 # move a whole subtree with "hg rename --after ."
-copying a to ../d3/a
-copying b to ../d3/b
-copying ba to ../d3/ba
-copying d11/a1 to ../d3/d11/a1
-removing a
-removing b
-removing ba
-removing d11/a1
+moving a to ../d3/a
+moving b to ../d3/b
+moving ba to ../d3/ba
+moving d11/a1 to ../d3/d11/a1
 A d3/a
   d1/a
 A d3/b
@@ -291,14 +255,10 @@
 R d1/d11/a1
 4 files updated, 0 files merged, 4 files removed, 0 files unresolved
 # move the parent tree with "hg rename .."
-copying ../a to ../../d3/a
-copying ../b to ../../d3/b
-copying ../ba to ../../d3/ba
-copying a1 to ../../d3/d11/a1
-removing ../a
-removing ../b
-removing ../ba
-removing a1
+moving ../a to ../../d3/a
+moving ../b to ../../d3/b
+moving ../ba to ../../d3/ba
+moving a1 to ../../d3/d11/a1
 A d3/a
   d1/a
 A d3/b
@@ -313,12 +273,9 @@
 R d1/d11/a1
 4 files updated, 0 files merged, 4 files removed, 0 files unresolved
 # skip removed files
-copying d1/a to d3/a
-copying d1/ba to d3/ba
-copying d1/d11/a1 to d3/d11/a1
-removing d1/a
-removing d1/ba
-removing d1/d11/a1
+moving d1/a to d3/a
+moving d1/ba to d3/ba
+moving d1/d11/a1 to d3/d11/a1
 A d3/a
   d1/a
 A d3/ba
--- a/tests/test-revert	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-revert	Wed Feb 06 19:57:52 2008 -0800
@@ -94,4 +94,9 @@
 hg revert b newdir
 echo foobar > b/b
 hg revert .
-true
+
+echo % reverting a rename target should revert the source
+hg mv a newa
+hg revert newa
+hg st a newa
+
--- a/tests/test-revert.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-revert.out	Wed Feb 06 19:57:52 2008 -0800
@@ -63,3 +63,5 @@
 reverting b/b
 forgetting newdir/newfile
 reverting b/b
+% reverting a rename target should revert the source
+? newa
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-revlog-packentry	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+hg init repo
+cd repo
+
+touch foo
+hg ci -Am 'add foo'
+
+hg up -C null
+# this should be stored as a delta against rev 0
+echo foo bar baz > foo
+hg ci -Am 'add foo again'
+
+hg debugindex .hg/store/data/foo.i
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-revlog-packentry.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,6 @@
+adding foo
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+adding foo
+   rev    offset  length   base linkrev nodeid       p1           p2
+     0         0       0      0       0 b80de5d13875 000000000000 000000000000
+     1         0      24      0       1 0376abec49b8 000000000000 000000000000
--- a/tests/test-ro-message	Thu Jul 26 07:56:27 2007 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,17 +0,0 @@
-#!/bin/sh
-HG=hg
-"$HG" init
-mkdir b
-echo 'Bouncy' >b/bouncy
-echo 'tricycle' >b/vehicle
-"$HG" add b/bouncy
-"$HG" add b/vehicle
-"$HG" commit -m 'Adding bouncy'
-echo 'bouncy' >>b/bouncy
-"$HG" commit -m 'Making it bouncier'
-"$HG" update -C 0
-echo 'stationary' >>b/vehicle
-"$HG" commit -m 'Clarifying the vehicle.'
-"$HG" update -C 1
-chmod a-w b/vehicle
-"$HG" merge 2 2>&1 | sed 's|^\(.*[ 	]\).*/\([^/]*/[^/]*/[^/]*\)$|\1\2|g'
--- a/tests/test-ro-message.out	Thu Jul 26 07:56:27 2007 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,3 +0,0 @@
-1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-2 files updated, 0 files merged, 0 files removed, 0 files unresolved
-abort: Permission denied: test-ro-message/b/vehicle
--- a/tests/test-rollback	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-rollback	Wed Feb 06 19:57:52 2008 -0800
@@ -13,3 +13,9 @@
 hg verify
 hg parents
 hg status
+
+# Test issue 902
+hg commit -m "test"
+hg branch test
+hg rollback
+hg branch
--- a/tests/test-rollback.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-rollback.out	Wed Feb 06 19:57:52 2008 -0800
@@ -16,3 +16,6 @@
 checking files
 0 files, 0 changesets, 0 total revisions
 A a
+marked working directory as branch test
+rolling back last transaction
+default
--- a/tests/test-serve	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-serve	Wed Feb 06 19:57:52 2008 -0800
@@ -7,12 +7,40 @@
 echo 'accesslog = access.log' >> .hg/hgrc
 
 echo % Without -v
-hg serve -a localhost -p 20063 -d --pid-file=hg.pid
+hg serve -a localhost -p $HGPORT -d --pid-file=hg.pid
 cat hg.pid >> "$DAEMON_PIDS"
 if [ -f access.log ]; then
     echo 'access log created - .hg/hgrc respected'
 fi
 
 echo % With -v
-hg serve -a localhost -p 20064 -d --pid-file=hg.pid -v
+hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid -v | sed -e 's,:[0-9][0-9]*/,/,'
+cat hg.pid >> "$DAEMON_PIDS"
+sleep 1
+kill `cat hg.pid`
+sleep 1
+
+echo % With --prefix foo
+hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid -v --prefix foo | sed -e 's,:[0-9][0-9]*/,/,'
 cat hg.pid >> "$DAEMON_PIDS"
+sleep 1
+kill `cat hg.pid`
+sleep 1
+
+echo % With --prefix /foo
+hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid -v --prefix /foo | sed -e 's,:[0-9][0-9]*/,/,'
+cat hg.pid >> "$DAEMON_PIDS"
+sleep 1
+kill `cat hg.pid`
+sleep 1
+
+echo % With --prefix foo/
+hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid -v --prefix foo/ | sed -e 's,:[0-9][0-9]*/,/,'
+cat hg.pid >> "$DAEMON_PIDS"
+sleep 1
+kill `cat hg.pid`
+sleep 1
+
+echo % With --prefix /foo/
+hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid -v --prefix /foo/ | sed -e 's,:[0-9][0-9]*/,/,'
+cat hg.pid >> "$DAEMON_PIDS"
--- a/tests/test-serve.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-serve.out	Wed Feb 06 19:57:52 2008 -0800
@@ -1,4 +1,12 @@
 % Without -v
 access log created - .hg/hgrc respected
 % With -v
-listening at http://localhost:20064/
+listening at http://localhost/
+% With --prefix foo
+listening at http://localhost/foo/
+% With --prefix /foo
+listening at http://localhost/foo/
+% With --prefix foo/
+listening at http://localhost/foo/
+% With --prefix /foo/
+listening at http://localhost/foo/
--- a/tests/test-simple-update.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-simple-update.out	Wed Feb 06 19:57:52 2008 -0800
@@ -21,4 +21,4 @@
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 foo
 bar
-6f4310b00b9a147241b071a60c28a650827fb03d 644 foo
+6f4310b00b9a147241b071a60c28a650827fb03d 644   foo
Binary file tests/test-simplemerge-cmd.out has changed
--- a/tests/test-simplemerge.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-simplemerge.py	Wed Feb 06 19:57:52 2008 -0800
@@ -19,14 +19,24 @@
 from unittest import TestCase
 import imp
 import shutil
-from mercurial import util
+from mercurial import util, simplemerge
+
+# bzr compatible interface, for the tests
+class Merge3(simplemerge.Merge3Text):
+    """3-way merge of texts.
 
-# copy simplemerge to the cwd to avoid creating a .pyc file in the source tree
-shutil.copyfile(os.path.join(os.environ['TESTDIR'], os.path.pardir,
-                             'contrib', 'simplemerge'),
-                'simplemerge.py')
-simplemerge = imp.load_source('simplemerge', 'simplemerge.py')
-Merge3 = simplemerge.Merge3
+    Given BASE, OTHER, THIS, tries to produce a combined text
+    incorporating the changes from both BASE->OTHER and BASE->THIS.
+    All three will typically be sequences of lines."""
+    def __init__(self, base, a, b):
+        basetext = '\n'.join([i.strip('\n') for i in base] + [''])
+        atext = '\n'.join([i.strip('\n') for i in a] + [''])
+        btext = '\n'.join([i.strip('\n') for i in b] + [''])
+        if util.binary(basetext) or util.binary(atext) or util.binary(btext):
+            raise util.Abort("don't know how to merge binary files")
+        simplemerge.Merge3Text.__init__(self, basetext, atext, btext,
+                                        base, a, b)
+
 CantReprocessAndShowBase = simplemerge.CantReprocessAndShowBase
 
 def split_lines(t):
--- a/tests/test-ssh	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-ssh	Wed Feb 06 19:57:52 2008 -0800
@@ -27,6 +27,11 @@
 sys.exit(bool(r))
 EOF
 
+cat <<EOF > badhook
+import sys
+sys.stdout.write("KABOOM")
+EOF
+
 echo "# creating 'remote'"
 hg init remote
 cd remote
@@ -91,13 +96,16 @@
 
 echo z > z
 hg ci -A -m z -d '1000001 0' z
+# a bad, evil hook that prints to stdout
+echo 'changegroup.stdout = python ../badhook' >> .hg/hgrc
 
 cd ../local
 echo r > r
 hg ci -A -m z -d '1000002 0' r
 
-echo "# push should succeed"
+echo "# push should succeed even though it has an unexpected response"
 hg push
+hg -R ../remote heads
 
 cd ..
 cat dummylog
--- a/tests/test-ssh.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-ssh.out	Wed Feb 06 19:57:52 2008 -0800
@@ -70,7 +70,7 @@
 checking files
 2 files, 2 changesets, 3 total revisions
 bleah
-# push should succeed
+# push should succeed even though it has an unexpected response
 pushing to ssh://user@dummy/remote
 searching for changes
 note: unsynced remote changes!
@@ -78,6 +78,21 @@
 remote: adding manifests
 remote: adding file changes
 remote: added 1 changesets with 1 changes to 1 files
+abort: unexpected response:
+'KABOOM1\n'
+changeset:   3:ac7448082955
+tag:         tip
+parent:      1:572896fe480d
+user:        test
+date:        Mon Jan 12 13:46:42 1970 +0000
+summary:     z
+
+changeset:   2:187c6caa0d1e
+parent:      0:e34318c26897
+user:        test
+date:        Mon Jan 12 13:46:41 1970 +0000
+summary:     z
+
 Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
--- a/tests/test-static-http	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-static-http	Wed Feb 06 19:57:52 2008 -0800
@@ -2,18 +2,18 @@
 
 cp "$TESTDIR"/printenv.py .
 
-http_proxy= hg clone static-http://localhost:20059/ copy
+http_proxy= hg clone static-http://localhost:$HGPORT/ copy
 echo $?
 test -d copy || echo copy: No such file or directory
 
 # This server doesn't do range requests so it's basically only good for
 # one pull
 cat > dumb.py <<EOF
-import BaseHTTPServer, SimpleHTTPServer, signal
+import BaseHTTPServer, SimpleHTTPServer, os, signal
 
 def run(server_class=BaseHTTPServer.HTTPServer,
         handler_class=SimpleHTTPServer.SimpleHTTPRequestHandler):
-    server_address = ('localhost', 20059)
+    server_address = ('localhost', int(os.environ['HGPORT']))
     httpd = server_class(server_address, handler_class)
     httpd.serve_forever()
 
@@ -34,7 +34,7 @@
 
 cd ..
 
-http_proxy= hg clone static-http://localhost:20059/remote local
+http_proxy= hg clone static-http://localhost:$HGPORT/remote local | sed -e 's,:[0-9][0-9]*/,/,'
 
 cd local
 hg verify
@@ -47,6 +47,20 @@
 cd ../local
 echo '[hooks]' >> .hg/hgrc
 echo 'changegroup = python ../printenv.py changegroup' >> .hg/hgrc
-http_proxy= hg pull
+http_proxy= hg pull | sed -e 's,:[0-9][0-9]*/,/,'
+
+echo '% test with "/" URI (issue 747)'
+cd ..
+hg init
+echo a > a
+hg add a
+hg ci -ma
+
+http_proxy= hg clone static-http://localhost:$HGPORT/ local2 | sed -e 's,:[0-9][0-9]*/,/,'
+
+cd local2
+hg verify
+cat a
+hg paths | sed -e 's,:[0-9][0-9]*/,/,'
 
 kill $!
--- a/tests/test-static-http.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-static-http.out	Wed Feb 06 19:57:52 2008 -0800
@@ -20,11 +20,25 @@
 1 files, 1 changesets, 1 total revisions
 foo
 adding quux
-changegroup hook: HG_NODE=34401e0e9971e9720b613d9089ffa9a6eefb3d2d HG_SOURCE=pull HG_URL=static-http://localhost:20059/remote 
-pulling from static-http://localhost:20059/remote
+changegroup hook: HG_NODE=34401e0e9971e9720b613d9089ffa9a6eefb3d2d HG_SOURCE=pull HG_URL=static-http://localhost/remote 
+pulling from static-http://localhost/remote
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files
 (run 'hg update' to get a working copy)
+% test with "/" URI (issue 747)
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 1 changesets, 1 total revisions
+a
+default = static-http://localhost/
--- a/tests/test-strict.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-strict.out	Wed Feb 06 19:57:52 2008 -0800
@@ -18,7 +18,6 @@
  pull       pull changes from the specified source
  push       push changes to the specified destination
  remove     remove the specified files on the next commit
- revert     revert files or dirs to their states as of some revision
  serve      export the repository via HTTP
  status     show changed files in the working directory
  update     update working directory
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-strip-cross	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,68 @@
+#!/bin/sh
+
+# test stripping of filelogs where the linkrev doesn't always increase
+
+echo '[extensions]' >> $HGRCPATH
+echo 'hgext.mq =' >> $HGRCPATH
+
+hg init orig
+cd orig
+
+hidefilename()
+{
+    sed -e 's/saving bundle to .*strip-backup/saving bundle to strip-backup/'
+}
+
+commit()
+{
+    hg up -qC null
+    count=1
+    for i in "$@"; do
+	for f in $i; do
+	    echo $count > $f
+	done
+	count=`expr $count + 1`
+    done
+    hg commit -qAm "$*"
+}
+
+# 2 1 0 2 0 1 2
+commit '201 210'
+
+commit '102 120' '210'
+
+commit '021'
+
+commit '201' '021 120'
+
+commit '012 021' '102 201' '120 210'
+
+commit 'manifest-file'
+
+commit '102 120' '012 210' '021 201'
+
+commit '201 210' '021 120' '012 102'
+
+HGUSER=another-user; export HGUSER
+commit 'manifest-file'
+
+commit '012' 'manifest-file'
+
+cd ..
+hg clone -q -U -r -1 -r -2 -r -3 -r -4 -r -6 orig crossed
+
+for i in crossed/.hg/store/00manifest.i crossed/.hg/store/data/*.i; do
+    echo $i
+    hg debugindex $i
+    echo
+done
+
+for i in 0 1 2 3 4; do
+    hg clone -q -U --pull crossed $i
+    echo "% Trying to strip revision $i"
+    hg --cwd $i strip $i 2>&1 | hidefilename
+    echo "% Verifying"
+    hg --cwd $i verify
+    echo
+done
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-strip-cross.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,118 @@
+crossed/.hg/store/00manifest.i
+   rev    offset  length   base linkrev nodeid       p1           p2
+     0         0     112      0       0 6f105cbb914d 000000000000 000000000000
+     1       112      56      1       3 1b55917b3699 000000000000 000000000000
+     2       168     123      1       1 8f3d04e263e5 000000000000 000000000000
+     3       291     122      1       2 f0ef8726ac4f 000000000000 000000000000
+     4       413      87      4       4 0b76e38b4070 000000000000 000000000000
+
+crossed/.hg/store/data/012.i
+   rev    offset  length   base linkrev nodeid       p1           p2
+     0         0       3      0       0 b8e02f643373 000000000000 000000000000
+     1         3       3      1       1 5d9299349fc0 000000000000 000000000000
+     2         6       3      2       2 2661d26c6496 000000000000 000000000000
+
+crossed/.hg/store/data/021.i
+   rev    offset  length   base linkrev nodeid       p1           p2
+     0         0       3      0       0 b8e02f643373 000000000000 000000000000
+     1         3       3      1       2 5d9299349fc0 000000000000 000000000000
+     2         6       3      2       1 2661d26c6496 000000000000 000000000000
+
+crossed/.hg/store/data/102.i
+   rev    offset  length   base linkrev nodeid       p1           p2
+     0         0       3      0       1 b8e02f643373 000000000000 000000000000
+     1         3       3      1       0 5d9299349fc0 000000000000 000000000000
+     2         6       3      2       2 2661d26c6496 000000000000 000000000000
+
+crossed/.hg/store/data/120.i
+   rev    offset  length   base linkrev nodeid       p1           p2
+     0         0       3      0       1 b8e02f643373 000000000000 000000000000
+     1         3       3      1       2 5d9299349fc0 000000000000 000000000000
+     2         6       3      2       0 2661d26c6496 000000000000 000000000000
+
+crossed/.hg/store/data/201.i
+   rev    offset  length   base linkrev nodeid       p1           p2
+     0         0       3      0       2 b8e02f643373 000000000000 000000000000
+     1         3       3      1       0 5d9299349fc0 000000000000 000000000000
+     2         6       3      2       1 2661d26c6496 000000000000 000000000000
+
+crossed/.hg/store/data/210.i
+   rev    offset  length   base linkrev nodeid       p1           p2
+     0         0       3      0       2 b8e02f643373 000000000000 000000000000
+     1         3       3      1       1 5d9299349fc0 000000000000 000000000000
+     2         6       3      2       0 2661d26c6496 000000000000 000000000000
+
+crossed/.hg/store/data/manifest-file.i
+   rev    offset  length   base linkrev nodeid       p1           p2
+     0         0       3      0       3 b8e02f643373 000000000000 000000000000
+     1         3       3      1       4 5d9299349fc0 000000000000 000000000000
+
+% Trying to strip revision 0
+saving bundle to strip-backup/cbb8c2f0a2e3-backup
+saving bundle to strip-backup/cbb8c2f0a2e3-temp
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 4 changesets with 15 changes to 7 files (+3 heads)
+% Verifying
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+7 files, 4 changesets, 15 total revisions
+
+% Trying to strip revision 1
+saving bundle to strip-backup/124ecc0cbec9-backup
+saving bundle to strip-backup/124ecc0cbec9-temp
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 12 changes to 7 files (+3 heads)
+% Verifying
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+7 files, 4 changesets, 14 total revisions
+
+% Trying to strip revision 2
+saving bundle to strip-backup/f6439b304a1a-backup
+saving bundle to strip-backup/f6439b304a1a-temp
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 8 changes to 6 files (+2 heads)
+% Verifying
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+7 files, 4 changesets, 14 total revisions
+
+% Trying to strip revision 3
+saving bundle to strip-backup/6e54ec5db740-backup
+saving bundle to strip-backup/6e54ec5db740-temp
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 2 files (+1 heads)
+% Verifying
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+7 files, 4 changesets, 19 total revisions
+
+% Trying to strip revision 4
+saving bundle to strip-backup/9147ea23c156-backup
+% Verifying
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+7 files, 4 changesets, 19 total revisions
+
--- a/tests/test-symlink-basic	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-symlink-basic	Wed Feb 06 19:57:52 2008 -0800
@@ -7,14 +7,6 @@
     sed -e "s:/.*\(/test-symlink-basic/.*\):...\1:"
 }
 
-cat >> readlink.py <<EOF
-import os
-import sys
-
-for f in sys.argv[1:]:
-    print f, '->', os.readlink(f)
-EOF
-
 hg init a
 cd a
 ln -s nothing dangling
@@ -25,29 +17,29 @@
 hg tip -v
 hg manifest --debug
 echo '% rev 0:'
-python ../readlink.py dangling
+$TESTDIR/readlink.py dangling
 
 rm dangling
 ln -s void dangling
 hg commit -m 'change symlink'
 echo '% rev 1:'
-python ../readlink.py dangling
+$TESTDIR/readlink.py dangling
 
 echo '% modifying link'
 rm dangling
 ln -s empty dangling
-python ../readlink.py dangling
+$TESTDIR/readlink.py dangling
 
 echo '% reverting to rev 0:'
 hg revert -r 0 -a
-python ../readlink.py dangling
+$TESTDIR/readlink.py dangling
 
 echo '% backups:'
-python ../readlink.py *.orig
+$TESTDIR/readlink.py *.orig
 
 rm *.orig
 hg up -C
 echo '% copies'
 hg cp -v dangling dangling2
 hg st -Cmard
-python ../readlink.py dangling dangling2
+$TESTDIR/readlink.py dangling dangling2
--- a/tests/test-symlink-basic.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-symlink-basic.out	Wed Feb 06 19:57:52 2008 -0800
@@ -8,7 +8,7 @@
 add symlink
 
 
-2564acbe54bbbedfbf608479340b359f04597f80 644 dangling
+2564acbe54bbbedfbf608479340b359f04597f80 644 @ dangling
 % rev 0:
 dangling -> nothing
 % rev 1:
--- a/tests/test-symlinks	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-symlinks	Wed Feb 06 19:57:52 2008 -0800
@@ -72,3 +72,13 @@
 echo '2. clone it'
 cd ..
 hg clone test testclone
+
+echo '# git symlink diff'
+cd testclone
+hg diff --git -r null:tip
+hg export --git tip > ../sl.diff
+echo '# import git symlink diff'
+hg rm a/b/c/demo
+hg commit -m'remove link'
+hg import ../sl.diff
+hg diff --git -r 1:tip
--- a/tests/test-symlinks.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-symlinks.out	Wed Feb 06 19:57:52 2008 -0800
@@ -20,3 +20,20 @@
 adding a/b/c/demo
 2. clone it
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+# git symlink diff
+diff --git a/a/b/c/demo b/a/b/c/demo
+new file mode 120000
+--- /dev/null
++++ b/a/b/c/demo
+@@ -0,0 +1,1 @@
++/path/to/symlink/source
+\ No newline at end of file
+# import git symlink diff
+applying ../sl.diff
+diff --git a/a/b/c/demo b/a/b/c/demo
+new file mode 120000
+--- /dev/null
++++ b/a/b/c/demo
+@@ -0,0 +1,1 @@
++/path/to/symlink/source
+\ No newline at end of file
--- a/tests/test-tags	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-tags	Wed Feb 06 19:57:52 2008 -0800
@@ -116,13 +116,31 @@
 cd t4
 echo foo > foo
 hg add
-hg ci -m 'add foo' -d '0 0'                # rev 0
-hg tag -d '0 0' bar                        # rev 1 bar -> 0
-hg tag -d '0 0' -f bar                     # rev 2 bar -> 1
+hg ci -m 'add foo'                 # rev 0
+hg tag bar                         # rev 1 bar -> 0
+hg tag -f bar                      # rev 2 bar -> 1
 hg up -qC 0
-hg tag -d '0 0' -fr 2 bar                  # rev 3 bar -> 2
+hg tag -fr 2 bar                   # rev 3 bar -> 2
 hg tags
 hg up -qC 0
-hg tag -d '0 0' -m 'retag rev 0' -fr 0 bar # rev 4 bar -> 0, but bar stays at 2
+hg tag -m 'retag rev 0' -fr 0 bar  # rev 4 bar -> 0, but bar stays at 2
 echo % bar should still point to rev 2
 hg tags
+
+
+# test that removing global/local tags does not get confused when trying
+# to remove a tag of type X which actually only exists as a type Y
+cd ..
+hg init t5
+cd t5
+echo foo > foo
+hg add
+hg ci -m 'add foo'                 # rev 0
+
+hg tag -r 0 -l localtag
+hg tag --remove localtag
+
+hg tag -r 0 globaltag
+hg tag --remove -l globaltag
+hg tags -v
+exit 0
--- a/tests/test-tags.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-tags.out	Wed Feb 06 19:57:52 2008 -0800
@@ -71,3 +71,9 @@
 % bar should still point to rev 2
 tip                                4:40af5d225513
 bar                                2:72b852876a42
+adding foo
+abort: localtag tag is local
+abort: globaltag tag is global
+tip                                1:a0b6fe111088
+localtag                           0:bbd179dfa0a7 local
+globaltag                          0:bbd179dfa0a7
--- a/tests/test-transplant	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-transplant	Wed Feb 06 19:57:52 2008 -0800
@@ -56,12 +56,12 @@
 hg transplant -s ../remote -a -b tip
 
 echo '% remote transplant with pull'
-hg -R ../t serve -p 20062 -d --pid-file=../t.pid
+hg -R ../t serve -p $HGPORT -d --pid-file=../t.pid
 cat ../t.pid >> $DAEMON_PIDS
 
 hg clone -r 0 ../t ../rp
 cd ../rp
-hg transplant -s http://localhost:20062/ 2 4
+hg transplant -s http://localhost:$HGPORT/ 2 4
 hg log --template '{rev} {parents} {desc}\n'
 
 echo '% transplant --continue'
--- a/tests/test-trusted.py	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-trusted.py	Wed Feb 06 19:57:52 2008 -0800
@@ -6,6 +6,9 @@
 from mercurial import ui, util
 
 hgrc = os.environ['HGRCPATH']
+f = open(hgrc)
+basehgrc = f.read()
+f.close()
 
 def testui(user='foo', group='bar', tusers=(), tgroups=(),
            cuser='foo', cgroup='bar', debug=False, silent=False):
@@ -16,7 +19,8 @@
     # write a global hgrc with the list of trusted users/groups and
     # some setting so that we can be sure it was read
     f = open(hgrc, 'w')
-    f.write('[paths]\n')
+    f.write(basehgrc)
+    f.write('\n[paths]\n')
     f.write('global = /some/path\n\n')
 
     if tusers or tgroups:
--- a/tests/test-ui-config	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-ui-config	Wed Feb 06 19:57:52 2008 -0800
@@ -1,10 +1,10 @@
 #!/usr/bin/env python
 
 import ConfigParser
-from mercurial import ui, util, cmdutil
+from mercurial import ui, util, dispatch
 
 testui = ui.ui()
-parsed = cmdutil.parseconfig([
+parsed = dispatch._parseconfig([
     'values.string=string value',
     'values.bool1=true',
     'values.bool2=false',
--- a/tests/test-ui-verbosity	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-ui-verbosity	Wed Feb 06 19:57:52 2008 -0800
@@ -4,6 +4,9 @@
 from mercurial import ui
 
 hgrc = os.environ['HGRCPATH']
+f = open(hgrc)
+basehgrc = f.read()
+f.close()
 
 print '      hgrc settings    command line options      final result   '
 print '    quiet verbo debug   quiet verbo debug      quiet verbo debug'
@@ -17,7 +20,8 @@
     cmd_debug    = bool(i & 1<<5)
 
     f = open(hgrc, 'w')
-    f.write('[ui]\n')
+    f.write(basehgrc)
+    f.write('\n[ui]\n')
     if hgrc_quiet:
         f.write('quiet = True\n')
     if hgrc_verbose:
--- a/tests/test-up-local-change.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-up-local-change.out	Wed Feb 06 19:57:52 2008 -0800
@@ -4,7 +4,7 @@
 diff -r 33aaa84a386b a
 --- a/a
 +++ b/a
-@@ -1,1 +1,1 @@ a
+@@ -1,1 +1,1 @@
 -a
 +abc
 adding b
@@ -17,8 +17,12 @@
 resolving manifests
  overwrite False partial False
  ancestor 33aaa84a386b local 33aaa84a386b+ remote 802f095af299
+  searching for copies back to rev 1
+  unmatched files in other:
+   b
  a: versions differ -> m
  b: remote created -> g
+picked tool 'true' for a (binary False symlink False)
 merging a
 my a@33aaa84a386b+ other a@802f095af299 ancestor a@33aaa84a386b
 getting b
@@ -50,8 +54,12 @@
 resolving manifests
  overwrite False partial False
  ancestor 33aaa84a386b local 33aaa84a386b+ remote 802f095af299
+  searching for copies back to rev 1
+  unmatched files in other:
+   b
  a: versions differ -> m
  b: remote created -> g
+picked tool 'true' for a (binary False symlink False)
 merging a
 my a@33aaa84a386b+ other a@802f095af299 ancestor a@33aaa84a386b
 getting b
@@ -82,7 +90,7 @@
 diff -r 802f095af299 a
 --- a/a
 +++ b/a
-@@ -1,1 +1,1 @@ a2
+@@ -1,1 +1,1 @@
 -a2
 +abc
 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
@@ -100,10 +108,13 @@
 resolving manifests
  overwrite False partial False
  ancestor 33aaa84a386b local 802f095af299+ remote 030602aee63d
+  searching for copies back to rev 1
  a: versions differ -> m
  b: versions differ -> m
+picked tool 'true' for a (binary False symlink False)
 merging a
 my a@802f095af299+ other a@030602aee63d ancestor a@33aaa84a386b
+picked tool 'true' for b (binary False symlink False)
 merging b
 my b@802f095af299+ other b@030602aee63d ancestor b@000000000000
 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -123,7 +134,7 @@
 diff -r 802f095af299 a
 --- a/a
 +++ b/a
-@@ -1,1 +1,1 @@ a2
+@@ -1,1 +1,1 @@
 -a2
 +abc
 adding a
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-url-rev	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,83 @@
+#!/bin/sh
+# test basic functionality of url#rev syntax
+
+hg init repo
+cd repo
+echo a > a
+hg ci -qAm 'add a' -d '0 0'
+hg branch foo
+echo >> a
+hg ci -m 'change a' -d '0 0'
+cd ..
+
+echo '% clone repo#foo'
+hg clone 'repo#foo' clone
+echo '% heads'
+hg --cwd clone heads
+echo '% parents'
+hg --cwd clone parents
+sed -e 's/default.*#/default = #/' clone/.hg/hgrc
+echo
+
+echo '% changing original repo'
+cd repo
+echo >> a
+hg ci -m 'new head of branch foo' -d '0 0'
+hg up -qC default
+echo bar > bar
+hg ci -qAm 'add bar' -d '0 0'
+hg log
+echo
+
+echo '% outgoing'
+hg -q outgoing '../clone#foo'
+echo
+
+echo '% push'
+hg -q push '../clone#foo'
+hg --cwd ../clone heads
+cd ..
+echo
+
+echo '% rolling back'
+cd clone
+hg rollback
+
+echo '% incoming'
+hg -q incoming
+
+echo '% pull'
+hg -q pull
+hg heads
+echo
+
+echo '% pull should not have updated'
+hg parents -q
+echo '% going back to the default branch'
+hg up -C 0
+hg parents
+echo '% no new revs, no update'
+hg pull -qu
+hg parents -q
+echo '% rollback'
+hg rollback
+hg up -C 0
+hg parents -q
+echo '% pull -u takes us back to branch foo'
+hg pull -qu
+hg parents
+
+echo '% rollback'
+hg rollback
+hg up -C 0
+echo '% parents'
+hg parents -q
+echo '% heads'
+hg heads -q
+echo '% pull -u -r otherrev url#rev updates to rev'
+hg pull -qur default default
+echo '% parents'
+hg parents
+echo '% heads'
+hg heads
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-url-rev.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,130 @@
+marked working directory as branch foo
+% clone repo#foo
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 2 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% heads
+changeset:   1:cd2a86ecc814
+branch:      foo
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     change a
+
+% parents
+changeset:   1:cd2a86ecc814
+branch:      foo
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     change a
+
+[paths]
+default = #foo
+
+% changing original repo
+changeset:   3:4cd725637392
+tag:         tip
+parent:      0:1f0dee641bb7
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     add bar
+
+changeset:   2:faba9097cad4
+branch:      foo
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     new head of branch foo
+
+changeset:   1:cd2a86ecc814
+branch:      foo
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     change a
+
+changeset:   0:1f0dee641bb7
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     add a
+
+
+% outgoing
+2:faba9097cad4
+
+% push
+changeset:   2:faba9097cad4
+branch:      foo
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     new head of branch foo
+
+
+% rolling back
+rolling back last transaction
+% incoming
+2:faba9097cad4
+% pull
+changeset:   2:faba9097cad4
+branch:      foo
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     new head of branch foo
+
+
+% pull should not have updated
+1:cd2a86ecc814
+% going back to the default branch
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+changeset:   0:1f0dee641bb7
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     add a
+
+% no new revs, no update
+0:1f0dee641bb7
+% rollback
+rolling back last transaction
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+0:1f0dee641bb7
+% pull -u takes us back to branch foo
+changeset:   2:faba9097cad4
+branch:      foo
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     new head of branch foo
+
+% rollback
+rolling back last transaction
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% parents
+0:1f0dee641bb7
+% heads
+1:cd2a86ecc814
+% pull -u -r otherrev url#rev updates to rev
+% parents
+changeset:   2:faba9097cad4
+branch:      foo
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     new head of branch foo
+
+% heads
+changeset:   3:4cd725637392
+tag:         tip
+parent:      0:1f0dee641bb7
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     add bar
+
+changeset:   2:faba9097cad4
+branch:      foo
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     new head of branch foo
+
--- a/tests/test-walk	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-walk	Wed Feb 06 19:57:52 2008 -0800
@@ -58,6 +58,7 @@
 debugwalk ../.hg
 chdir ..
 debugwalk -Ibeans
+debugwalk -I '{*,{b,m}*/*}k'
 debugwalk 'glob:mammals/../beans/b*'
 debugwalk '-X*/Procyonidae' mammals
 debugwalk path:mammals
--- a/tests/test-walk.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-walk.out	Wed Feb 06 19:57:52 2008 -0800
@@ -174,6 +174,11 @@
 f  beans/pinto     beans/pinto
 f  beans/turtle    beans/turtle
 
+hg debugwalk -I {*,{b,m}*/*}k
+f  beans/black    beans/black
+f  fenugreek      fenugreek
+f  mammals/skunk  mammals/skunk
+
 hg debugwalk glob:mammals/../beans/b*
 f  beans/black     beans/black
 f  beans/borlotti  beans/borlotti
--- a/tests/test-webraw	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-webraw	Wed Feb 06 19:57:52 2008 -0800
@@ -10,9 +10,9 @@
 ENDSOME
 hg add sometext.txt
 hg commit -d "1 0" -m "Just some text"
-hg serve -p 20059 -A access.log -E error.log -d --pid-file=hg.pid
+hg serve -p $HGPORT -A access.log -E error.log -d --pid-file=hg.pid
 cat hg.pid >> $DAEMON_PIDS
-("$TESTDIR/get-with-headers.py" localhost:20059 '/?f=f165dc289438;file=sometext.txt;style=raw' content-type content-length content-disposition) >getoutput.txt &
+("$TESTDIR/get-with-headers.py" localhost:$HGPORT '/?f=f165dc289438;file=sometext.txt;style=raw' content-type content-length content-disposition) >getoutput.txt &
 
 sleep 5
 kill `cat hg.pid`
--- a/tests/test-webraw.out	Thu Jul 26 07:56:27 2007 -0400
+++ b/tests/test-webraw.out	Wed Feb 06 19:57:52 2008 -0800
@@ -1,7 +1,7 @@
 200 Script output follows
 content-type: text/plain
 content-length: 157
-content-disposition: filename=sometext.txt
+content-disposition: inline; filename=sometext.txt
 
 This is just some random text
 that will go inside the file and take a few lines.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-win32text	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,101 @@
+#!/bin/sh
+
+cat > unix2dos.py <<EOF
+import sys
+
+for path in sys.argv[1:]:
+    data = file(path, 'rb').read()
+    data = data.replace('\n', '\r\n')
+    file(path, 'wb').write(data)
+EOF
+
+cat > print.py <<EOF
+import sys
+print(sys.stdin.read().replace('\n', '<LF>').replace('\r', '<CR>').replace('\0', '<NUL>'))
+EOF
+
+hg init
+echo '[hooks]' >> .hg/hgrc
+echo 'pretxncommit.crlf = python:hgext.win32text.forbidcrlf' >> .hg/hgrc
+echo 'pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf' >> .hg/hgrc
+cat .hg/hgrc
+echo
+
+echo hello > f
+hg add f
+hg ci -m 1 -d'0 0'
+echo
+
+python unix2dos.py f
+hg ci -m 2 -d'0 0'
+hg revert -a
+echo
+
+mkdir d
+echo hello > d/f2
+python unix2dos.py d/f2
+hg add d/f2
+hg ci -m 3 -d'0 0'
+hg revert -a
+rm d/f2
+echo
+
+hg rem f
+hg ci -m 4 -d'0 0'
+echo
+
+python -c 'file("bin", "wb").write("hello\x00\x0D\x0A")'
+hg add bin
+hg ci -m 5 -d'0 0'
+hg log -v
+echo
+
+hg clone . dupe
+echo
+for x in a b c d; do echo content > dupe/$x; done
+hg -R dupe add
+python unix2dos.py dupe/b dupe/c dupe/d
+hg -R dupe ci -m a -d'0 0' dupe/a
+hg -R dupe ci -m b/c -d'0 0' dupe/[bc]
+hg -R dupe ci -m d -d'0 0' dupe/d
+hg -R dupe log -v
+echo
+
+hg pull dupe
+echo
+
+hg log -v
+echo
+
+rm .hg/hgrc
+(echo some; echo text) > f3
+python -c 'file("f4.bat", "wb").write("rem empty\x0D\x0A")'
+hg add f3 f4.bat
+hg ci -m 6 -d'0 0'
+
+python print.py < bin
+python print.py < f3
+python print.py < f4.bat
+echo
+
+echo '[extensions]' >> .hg/hgrc
+echo 'win32text = ' >> .hg/hgrc
+echo '[decode]' >> .hg/hgrc
+echo '** = cleverdecode:' >> .hg/hgrc
+echo '[encode]' >> .hg/hgrc
+echo '** = cleverencode:' >> .hg/hgrc
+cat .hg/hgrc
+echo
+
+rm f3 f4.bat bin
+hg co 2>&1 | python -c 'import sys, os; sys.stdout.write(sys.stdin.read().replace(os.getcwd(), "...."))'
+python print.py < bin
+python print.py < f3
+python print.py < f4.bat
+echo
+
+python -c 'file("f5.sh", "wb").write("# empty\x0D\x0A")'
+hg add f5.sh
+hg ci -m 7 -d'0 0'
+python print.py < f5.sh
+hg cat f5.sh | python print.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-win32text.out	Wed Feb 06 19:57:52 2008 -0800
@@ -0,0 +1,179 @@
+[hooks]
+pretxncommit.crlf = python:hgext.win32text.forbidcrlf
+pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
+
+
+Attempt to commit or push text file(s) using CRLF line endings
+in b1aa5cde7ff4: f
+transaction abort!
+rollback completed
+abort: pretxncommit.crlf hook failed
+reverting f
+
+Attempt to commit or push text file(s) using CRLF line endings
+in 88b17af74937: d/f2
+transaction abort!
+rollback completed
+abort: pretxncommit.crlf hook failed
+forgetting d/f2
+
+
+changeset:   2:b67b2dae057a
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+files:       bin
+description:
+5
+
+
+changeset:   1:c72a7d1d0907
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+files:       f
+description:
+4
+
+
+changeset:   0:fcf06d5c4e1d
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+files:       f
+description:
+1
+
+
+
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+adding dupe/a
+adding dupe/b
+adding dupe/c
+adding dupe/d
+changeset:   5:6e8a7629ff5b
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+files:       d
+description:
+d
+
+
+changeset:   4:ac30a42ce8bc
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+files:       b c
+description:
+b/c
+
+
+changeset:   3:a73b85ef1fb7
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+files:       a
+description:
+a
+
+
+changeset:   2:b67b2dae057a
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+files:       bin
+description:
+5
+
+
+changeset:   1:c72a7d1d0907
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+files:       f
+description:
+4
+
+
+changeset:   0:fcf06d5c4e1d
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+files:       f
+description:
+1
+
+
+
+pulling from dupe
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 4 changes to 4 files
+Attempt to commit or push text file(s) using CRLF line endings
+in ac30a42ce8bc: b
+in ac30a42ce8bc: c
+in 6e8a7629ff5b: d
+
+To prevent this mistake in your local repository,
+add to Mercurial.ini or .hg/hgrc:
+
+[hooks]
+pretxncommit.crlf = python:hgext.win32text.forbidcrlf
+
+and also consider adding:
+
+[extensions]
+hgext.win32text =
+[encode]
+** = cleverencode:
+[decode]
+** = cleverdecode:
+transaction abort!
+rollback completed
+abort: pretxnchangegroup.crlf hook failed
+
+changeset:   2:b67b2dae057a
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+files:       bin
+description:
+5
+
+
+changeset:   1:c72a7d1d0907
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+files:       f
+description:
+4
+
+
+changeset:   0:fcf06d5c4e1d
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+files:       f
+description:
+1
+
+
+
+hello<NUL><CR><LF>
+some<LF>text<LF>
+rem empty<CR><LF>
+
+[extensions]
+win32text = 
+[decode]
+** = cleverdecode:
+[encode]
+** = cleverencode:
+
+WARNING: f4.bat already has CRLF line endings
+and does not need EOL conversion by the win32text plugin.
+Before your next commit, please reconsider your encode/decode settings in 
+Mercurial.ini or ..../.hg/hgrc.
+3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+hello<NUL><CR><LF>
+some<CR><LF>text<CR><LF>
+rem empty<CR><LF>
+
+# empty<CR><LF>
+# empty<LF>